Switch to python3
parent 531041e89a
commit 9ba4b6a91a
5286 changed files with 677347 additions and 576888 deletions
BaseHTTPServer.py
@@ -1,603 +0,0 @@
"""HTTP server base class.
|
||||
|
||||
Note: the class in this module doesn't implement any HTTP request; see
|
||||
SimpleHTTPServer for simple implementations of GET, HEAD and POST
|
||||
(including CGI scripts). It does, however, optionally implement HTTP/1.1
|
||||
persistent connections, as of version 0.3.
|
||||
|
||||
Contents:
|
||||
|
||||
- BaseHTTPRequestHandler: HTTP request handler base class
|
||||
- test: test function
|
||||
|
||||
XXX To do:
|
||||
|
||||
- log requests even later (to capture byte count)
|
||||
- log user-agent header and other interesting goodies
|
||||
- send error log to separate file
|
||||
"""
|
||||
|
||||
|
||||
# See also:
|
||||
#
|
||||
# HTTP Working Group T. Berners-Lee
|
||||
# INTERNET-DRAFT R. T. Fielding
|
||||
# <draft-ietf-http-v10-spec-00.txt> H. Frystyk Nielsen
|
||||
# Expires September 8, 1995 March 8, 1995
|
||||
#
|
||||
# URL: http://www.ics.uci.edu/pub/ietf/http/draft-ietf-http-v10-spec-00.txt
|
||||
#
|
||||
# and
|
||||
#
|
||||
# Network Working Group R. Fielding
|
||||
# Request for Comments: 2616 et al
|
||||
# Obsoletes: 2068 June 1999
|
||||
# Category: Standards Track
|
||||
#
|
||||
# URL: http://www.faqs.org/rfcs/rfc2616.html
|
||||
|
||||
# Log files
|
||||
# ---------
|
||||
#
|
||||
# Here's a quote from the NCSA httpd docs about log file format.
|
||||
#
|
||||
# | The logfile format is as follows. Each line consists of:
|
||||
# |
|
||||
# | host rfc931 authuser [DD/Mon/YYYY:hh:mm:ss] "request" ddd bbbb
|
||||
# |
|
||||
# | host: Either the DNS name or the IP number of the remote client
|
||||
# | rfc931: Any information returned by identd for this person,
|
||||
# | - otherwise.
|
||||
# | authuser: If user sent a userid for authentication, the user name,
|
||||
# | - otherwise.
|
||||
# | DD: Day
|
||||
# | Mon: Month (calendar name)
|
||||
# | YYYY: Year
|
||||
# | hh: hour (24-hour format, the machine's timezone)
|
||||
# | mm: minutes
|
||||
# | ss: seconds
|
||||
# | request: The first line of the HTTP request as sent by the client.
|
||||
# | ddd: the status code returned by the server, - if not available.
|
||||
# | bbbb: the total number of bytes sent,
|
||||
# | *not including the HTTP/1.0 header*, - if not available
|
||||
# |
|
||||
# | You can determine the name of the file accessed through request.
|
||||
#
|
||||
# (Actually, the latter is only true if you know the server configuration
|
||||
# at the time the request was made!)
|
||||
|
||||
__version__ = "0.3"
|
||||
|
||||
__all__ = ["HTTPServer", "BaseHTTPRequestHandler"]
|
||||
|
||||
import sys
|
||||
import time
|
||||
import socket # For gethostbyaddr()
|
||||
from warnings import filterwarnings, catch_warnings
|
||||
with catch_warnings():
|
||||
if sys.py3kwarning:
|
||||
filterwarnings("ignore", ".*mimetools has been removed",
|
||||
DeprecationWarning)
|
||||
import mimetools
|
||||
import SocketServer
|
||||
|
||||
# Default error message template
|
||||
DEFAULT_ERROR_MESSAGE = """\
|
||||
<head>
|
||||
<title>Error response</title>
|
||||
</head>
|
||||
<body>
|
||||
<h1>Error response</h1>
|
||||
<p>Error code %(code)d.
|
||||
<p>Message: %(message)s.
|
||||
<p>Error code explanation: %(code)s = %(explain)s.
|
||||
</body>
|
||||
"""
|
||||
|
||||
DEFAULT_ERROR_CONTENT_TYPE = "text/html"
|
||||
|
||||
def _quote_html(html):
|
||||
return html.replace("&", "&").replace("<", "<").replace(">", ">")
|
||||
|
||||
class HTTPServer(SocketServer.TCPServer):
|
||||
|
||||
allow_reuse_address = 1 # Seems to make sense in testing environment
|
||||
|
||||
def server_bind(self):
|
||||
"""Override server_bind to store the server name."""
|
||||
SocketServer.TCPServer.server_bind(self)
|
||||
host, port = self.socket.getsockname()[:2]
|
||||
self.server_name = socket.getfqdn(host)
|
||||
self.server_port = port
|
||||
|
||||
|
||||
class BaseHTTPRequestHandler(SocketServer.StreamRequestHandler):
|
||||
|
||||
"""HTTP request handler base class.
|
||||
|
||||
The following explanation of HTTP serves to guide you through the
|
||||
code as well as to expose any misunderstandings I may have about
|
||||
HTTP (so you don't need to read the code to figure out I'm wrong
|
||||
:-).
|
||||
|
||||
HTTP (HyperText Transfer Protocol) is an extensible protocol on
|
||||
top of a reliable stream transport (e.g. TCP/IP). The protocol
|
||||
recognizes three parts to a request:
|
||||
|
||||
1. One line identifying the request type and path
|
||||
2. An optional set of RFC-822-style headers
|
||||
3. An optional data part
|
||||
|
||||
The headers and data are separated by a blank line.
|
||||
|
||||
The first line of the request has the form
|
||||
|
||||
<command> <path> <version>
|
||||
|
||||
where <command> is a (case-sensitive) keyword such as GET or POST,
|
||||
<path> is a string containing path information for the request,
|
||||
and <version> should be the string "HTTP/1.0" or "HTTP/1.1".
|
||||
<path> is encoded using the URL encoding scheme (using %xx to signify
|
||||
the ASCII character with hex code xx).
|
||||
|
||||
The specification specifies that lines are separated by CRLF but
|
||||
for compatibility with the widest range of clients recommends
|
||||
servers also handle LF. Similarly, whitespace in the request line
|
||||
is treated sensibly (allowing multiple spaces between components
|
||||
and allowing trailing whitespace).
|
||||
|
||||
Similarly, for output, lines ought to be separated by CRLF pairs
|
||||
but most clients grok LF characters just fine.
|
||||
|
||||
If the first line of the request has the form
|
||||
|
||||
<command> <path>
|
||||
|
||||
(i.e. <version> is left out) then this is assumed to be an HTTP
|
||||
0.9 request; this form has no optional headers and data part and
|
||||
the reply consists of just the data.
|
||||
|
||||
The reply form of the HTTP 1.x protocol again has three parts:
|
||||
|
||||
1. One line giving the response code
|
||||
2. An optional set of RFC-822-style headers
|
||||
3. The data
|
||||
|
||||
Again, the headers and data are separated by a blank line.
|
||||
|
||||
The response code line has the form
|
||||
|
||||
<version> <responsecode> <responsestring>
|
||||
|
||||
where <version> is the protocol version ("HTTP/1.0" or "HTTP/1.1"),
|
||||
<responsecode> is a 3-digit response code indicating success or
|
||||
failure of the request, and <responsestring> is an optional
|
||||
human-readable string explaining what the response code means.
|
||||
|
||||
This server parses the request and the headers, and then calls a
|
||||
function specific to the request type (<command>). Specifically,
|
||||
a request SPAM will be handled by a method do_SPAM(). If no
|
||||
such method exists the server sends an error response to the
|
||||
client. If it exists, it is called with no arguments:
|
||||
|
||||
do_SPAM()
|
||||
|
||||
Note that the request name is case sensitive (i.e. SPAM and spam
|
||||
are different requests).
|
||||
|
||||
The various request details are stored in instance variables:
|
||||
|
||||
- client_address is the client IP address in the form (host,
|
||||
port);
|
||||
|
||||
- command, path and version are the broken-down request line;
|
||||
|
||||
- headers is an instance of mimetools.Message (or a derived
|
||||
class) containing the header information;
|
||||
|
||||
- rfile is a file object open for reading positioned at the
|
||||
start of the optional input data part;
|
||||
|
||||
- wfile is a file object open for writing.
|
||||
|
||||
IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING!
|
||||
|
||||
The first thing to be written must be the response line. Then
|
||||
follow 0 or more header lines, then a blank line, and then the
|
||||
actual data (if any). The meaning of the header lines depends on
|
||||
the command executed by the server; in most cases, when data is
|
||||
returned, there should be at least one header line of the form
|
||||
|
||||
Content-type: <type>/<subtype>
|
||||
|
||||
where <type> and <subtype> should be registered MIME types,
|
||||
e.g. "text/html" or "text/plain".
|
||||
|
||||
"""
|
||||
|
||||
# The Python system version, truncated to its first component.
|
||||
sys_version = "Python/" + sys.version.split()[0]
|
||||
|
||||
# The server software version. You may want to override this.
|
||||
# The format is multiple whitespace-separated strings,
|
||||
# where each string is of the form name[/version].
|
||||
server_version = "BaseHTTP/" + __version__
|
||||
|
||||
# The default request version. This only affects responses up until
|
||||
# the point where the request line is parsed, so it mainly decides what
|
||||
# the client gets back when sending a malformed request line.
|
||||
# Most web servers default to HTTP 0.9, i.e. don't send a status line.
|
||||
default_request_version = "HTTP/0.9"
|
||||
|
||||
def parse_request(self):
|
||||
"""Parse a request (internal).
|
||||
|
||||
The request should be stored in self.raw_requestline; the results
|
||||
are in self.command, self.path, self.request_version and
|
||||
self.headers.
|
||||
|
||||
Return True for success, False for failure; on failure, an
|
||||
error is sent back.
|
||||
|
||||
"""
|
||||
self.command = None # set in case of error on the first line
|
||||
self.request_version = version = self.default_request_version
|
||||
self.close_connection = 1
|
||||
requestline = self.raw_requestline
|
||||
requestline = requestline.rstrip('\r\n')
|
||||
self.requestline = requestline
|
||||
words = requestline.split()
|
||||
if len(words) == 3:
|
||||
command, path, version = words
|
||||
if version[:5] != 'HTTP/':
|
||||
self.send_error(400, "Bad request version (%r)" % version)
|
||||
return False
|
||||
try:
|
||||
base_version_number = version.split('/', 1)[1]
|
||||
version_number = base_version_number.split(".")
|
||||
# RFC 2145 section 3.1 says there can be only one "." and
|
||||
# - major and minor numbers MUST be treated as
|
||||
# separate integers;
|
||||
# - HTTP/2.4 is a lower version than HTTP/2.13, which in
|
||||
# turn is lower than HTTP/12.3;
|
||||
# - Leading zeros MUST be ignored by recipients.
|
||||
if len(version_number) != 2:
|
||||
raise ValueError
|
||||
version_number = int(version_number[0]), int(version_number[1])
|
||||
except (ValueError, IndexError):
|
||||
self.send_error(400, "Bad request version (%r)" % version)
|
||||
return False
|
||||
if version_number >= (1, 1) and self.protocol_version >= "HTTP/1.1":
|
||||
self.close_connection = 0
|
||||
if version_number >= (2, 0):
|
||||
self.send_error(505,
|
||||
"Invalid HTTP Version (%s)" % base_version_number)
|
||||
return False
|
||||
elif len(words) == 2:
|
||||
command, path = words
|
||||
self.close_connection = 1
|
||||
if command != 'GET':
|
||||
self.send_error(400,
|
||||
"Bad HTTP/0.9 request type (%r)" % command)
|
||||
return False
|
||||
elif not words:
|
||||
return False
|
||||
else:
|
||||
self.send_error(400, "Bad request syntax (%r)" % requestline)
|
||||
return False
|
||||
self.command, self.path, self.request_version = command, path, version
|
||||
|
||||
# Examine the headers and look for a Connection directive
|
||||
self.headers = self.MessageClass(self.rfile, 0)
|
||||
|
||||
conntype = self.headers.get('Connection', "")
|
||||
if conntype.lower() == 'close':
|
||||
self.close_connection = 1
|
||||
elif (conntype.lower() == 'keep-alive' and
|
||||
self.protocol_version >= "HTTP/1.1"):
|
||||
self.close_connection = 0
|
||||
return True
|
||||
|
||||
def handle_one_request(self):
|
||||
"""Handle a single HTTP request.
|
||||
|
||||
You normally don't need to override this method; see the class
|
||||
__doc__ string for information on how to handle specific HTTP
|
||||
commands such as GET and POST.
|
||||
|
||||
"""
|
||||
try:
|
||||
self.raw_requestline = self.rfile.readline(65537)
|
||||
if len(self.raw_requestline) > 65536:
|
||||
self.requestline = ''
|
||||
self.request_version = ''
|
||||
self.command = ''
|
||||
self.send_error(414)
|
||||
return
|
||||
if not self.raw_requestline:
|
||||
self.close_connection = 1
|
||||
return
|
||||
if not self.parse_request():
|
||||
# An error code has been sent, just exit
|
||||
return
|
||||
mname = 'do_' + self.command
|
||||
if not hasattr(self, mname):
|
||||
self.send_error(501, "Unsupported method (%r)" % self.command)
|
||||
return
|
||||
method = getattr(self, mname)
|
||||
method()
|
||||
self.wfile.flush() #actually send the response if not already done.
|
||||
except socket.timeout, e:
|
||||
#a read or a write timed out. Discard this connection
|
||||
self.log_error("Request timed out: %r", e)
|
||||
self.close_connection = 1
|
||||
return
|
||||
|
||||
def handle(self):
|
||||
"""Handle multiple requests if necessary."""
|
||||
self.close_connection = 1
|
||||
|
||||
self.handle_one_request()
|
||||
while not self.close_connection:
|
||||
self.handle_one_request()
|
||||
|
||||
def send_error(self, code, message=None):
|
||||
"""Send and log an error reply.
|
||||
|
||||
Arguments are the error code, and a detailed message.
|
||||
The detailed message defaults to the short entry matching the
|
||||
response code.
|
||||
|
||||
This sends an error response (so it must be called before any
|
||||
output has been generated), logs the error, and finally sends
|
||||
a piece of HTML explaining the error to the user.
|
||||
|
||||
"""
|
||||
|
||||
try:
|
||||
short, long = self.responses[code]
|
||||
except KeyError:
|
||||
short, long = '???', '???'
|
||||
if message is None:
|
||||
message = short
|
||||
explain = long
|
||||
self.log_error("code %d, message %s", code, message)
|
||||
# using _quote_html to prevent Cross Site Scripting attacks (see bug #1100201)
|
||||
content = (self.error_message_format %
|
||||
{'code': code, 'message': _quote_html(message), 'explain': explain})
|
||||
self.send_response(code, message)
|
||||
self.send_header("Content-Type", self.error_content_type)
|
||||
self.send_header('Connection', 'close')
|
||||
self.end_headers()
|
||||
if self.command != 'HEAD' and code >= 200 and code not in (204, 304):
|
||||
self.wfile.write(content)
|
||||
|
||||
error_message_format = DEFAULT_ERROR_MESSAGE
|
||||
error_content_type = DEFAULT_ERROR_CONTENT_TYPE
|
||||
|
||||
def send_response(self, code, message=None):
|
||||
"""Send the response header and log the response code.
|
||||
|
||||
Also send two standard headers with the server software
|
||||
version and the current date.
|
||||
|
||||
"""
|
||||
self.log_request(code)
|
||||
if message is None:
|
||||
if code in self.responses:
|
||||
message = self.responses[code][0]
|
||||
else:
|
||||
message = ''
|
||||
if self.request_version != 'HTTP/0.9':
|
||||
self.wfile.write("%s %d %s\r\n" %
|
||||
(self.protocol_version, code, message))
|
||||
# print (self.protocol_version, code, message)
|
||||
self.send_header('Server', self.version_string())
|
||||
self.send_header('Date', self.date_time_string())
|
||||
|
||||
def send_header(self, keyword, value):
|
||||
"""Send a MIME header."""
|
||||
if self.request_version != 'HTTP/0.9':
|
||||
self.wfile.write("%s: %s\r\n" % (keyword, value))
|
||||
|
||||
if keyword.lower() == 'connection':
|
||||
if value.lower() == 'close':
|
||||
self.close_connection = 1
|
||||
elif value.lower() == 'keep-alive':
|
||||
self.close_connection = 0
|
||||
|
||||
def end_headers(self):
|
||||
"""Send the blank line ending the MIME headers."""
|
||||
if self.request_version != 'HTTP/0.9':
|
||||
self.wfile.write("\r\n")
|
||||
|
||||
def log_request(self, code='-', size='-'):
|
||||
"""Log an accepted request.
|
||||
|
||||
This is called by send_response().
|
||||
|
||||
"""
|
||||
|
||||
self.log_message('"%s" %s %s',
|
||||
self.requestline, str(code), str(size))
|
||||
|
||||
def log_error(self, format, *args):
|
||||
"""Log an error.
|
||||
|
||||
This is called when a request cannot be fulfilled. By
|
||||
default it passes the message on to log_message().
|
||||
|
||||
Arguments are the same as for log_message().
|
||||
|
||||
XXX This should go to the separate error log.
|
||||
|
||||
"""
|
||||
|
||||
self.log_message(format, *args)
|
||||
|
||||
def log_message(self, format, *args):
|
||||
"""Log an arbitrary message.
|
||||
|
||||
This is used by all other logging functions. Override
|
||||
it if you have specific logging wishes.
|
||||
|
||||
The first argument, FORMAT, is a format string for the
|
||||
message to be logged. If the format string contains
|
||||
any % escapes requiring parameters, they should be
|
||||
specified as subsequent arguments (it's just like
|
||||
printf!).
|
||||
|
||||
The client ip address and current date/time are prefixed to every
|
||||
message.
|
||||
|
||||
"""
|
||||
|
||||
sys.stderr.write("%s - - [%s] %s\n" %
|
||||
(self.client_address[0],
|
||||
self.log_date_time_string(),
|
||||
format%args))
|
||||
|
||||
def version_string(self):
|
||||
"""Return the server software version string."""
|
||||
return self.server_version + ' ' + self.sys_version
|
||||
|
||||
def date_time_string(self, timestamp=None):
|
||||
"""Return the current date and time formatted for a message header."""
|
||||
if timestamp is None:
|
||||
timestamp = time.time()
|
||||
year, month, day, hh, mm, ss, wd, y, z = time.gmtime(timestamp)
|
||||
s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
|
||||
self.weekdayname[wd],
|
||||
day, self.monthname[month], year,
|
||||
hh, mm, ss)
|
||||
return s
|
||||
|
||||
def log_date_time_string(self):
|
||||
"""Return the current time formatted for logging."""
|
||||
now = time.time()
|
||||
year, month, day, hh, mm, ss, x, y, z = time.localtime(now)
|
||||
s = "%02d/%3s/%04d %02d:%02d:%02d" % (
|
||||
day, self.monthname[month], year, hh, mm, ss)
|
||||
return s
|
||||
|
||||
weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
|
||||
|
||||
monthname = [None,
|
||||
'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
|
||||
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
|
||||
|
||||
def address_string(self):
|
||||
"""Return the client address formatted for logging.
|
||||
|
||||
This version looks up the full hostname using gethostbyaddr(),
|
||||
and tries to find a name that contains at least one dot.
|
||||
|
||||
"""
|
||||
|
||||
host, port = self.client_address[:2]
|
||||
return socket.getfqdn(host)
|
||||
|
||||
# Essentially static class variables
|
||||
|
||||
# The version of the HTTP protocol we support.
|
||||
# Set this to HTTP/1.1 to enable automatic keepalive
|
||||
protocol_version = "HTTP/1.0"
|
||||
|
||||
# The Message-like class used to parse headers
|
||||
MessageClass = mimetools.Message
|
||||
|
||||
# Table mapping response codes to messages; entries have the
|
||||
# form {code: (shortmessage, longmessage)}.
|
||||
# See RFC 2616.
|
||||
responses = {
|
||||
100: ('Continue', 'Request received, please continue'),
|
||||
101: ('Switching Protocols',
|
||||
'Switching to new protocol; obey Upgrade header'),
|
||||
|
||||
200: ('OK', 'Request fulfilled, document follows'),
|
||||
201: ('Created', 'Document created, URL follows'),
|
||||
202: ('Accepted',
|
||||
'Request accepted, processing continues off-line'),
|
||||
203: ('Non-Authoritative Information', 'Request fulfilled from cache'),
|
||||
204: ('No Content', 'Request fulfilled, nothing follows'),
|
||||
205: ('Reset Content', 'Clear input form for further input.'),
|
||||
206: ('Partial Content', 'Partial content follows.'),
|
||||
|
||||
300: ('Multiple Choices',
|
||||
'Object has several resources -- see URI list'),
|
||||
301: ('Moved Permanently', 'Object moved permanently -- see URI list'),
|
||||
302: ('Found', 'Object moved temporarily -- see URI list'),
|
||||
303: ('See Other', 'Object moved -- see Method and URL list'),
|
||||
304: ('Not Modified',
|
||||
'Document has not changed since given time'),
|
||||
305: ('Use Proxy',
|
||||
'You must use proxy specified in Location to access this '
|
||||
'resource.'),
|
||||
307: ('Temporary Redirect',
|
||||
'Object moved temporarily -- see URI list'),
|
||||
|
||||
400: ('Bad Request',
|
||||
'Bad request syntax or unsupported method'),
|
||||
401: ('Unauthorized',
|
||||
'No permission -- see authorization schemes'),
|
||||
402: ('Payment Required',
|
||||
'No payment -- see charging schemes'),
|
||||
403: ('Forbidden',
|
||||
'Request forbidden -- authorization will not help'),
|
||||
404: ('Not Found', 'Nothing matches the given URI'),
|
||||
405: ('Method Not Allowed',
|
||||
'Specified method is invalid for this resource.'),
|
||||
406: ('Not Acceptable', 'URI not available in preferred format.'),
|
||||
407: ('Proxy Authentication Required', 'You must authenticate with '
|
||||
'this proxy before proceeding.'),
|
||||
408: ('Request Timeout', 'Request timed out; try again later.'),
|
||||
409: ('Conflict', 'Request conflict.'),
|
||||
410: ('Gone',
|
||||
'URI no longer exists and has been permanently removed.'),
|
||||
411: ('Length Required', 'Client must specify Content-Length.'),
|
||||
412: ('Precondition Failed', 'Precondition in headers is false.'),
|
||||
413: ('Request Entity Too Large', 'Entity is too large.'),
|
||||
414: ('Request-URI Too Long', 'URI is too long.'),
|
||||
415: ('Unsupported Media Type', 'Entity body in unsupported format.'),
|
||||
416: ('Requested Range Not Satisfiable',
|
||||
'Cannot satisfy request range.'),
|
||||
417: ('Expectation Failed',
|
||||
'Expect condition could not be satisfied.'),
|
||||
|
||||
500: ('Internal Server Error', 'Server got itself in trouble'),
|
||||
501: ('Not Implemented',
|
||||
'Server does not support this operation'),
|
||||
502: ('Bad Gateway', 'Invalid responses from another server/proxy.'),
|
||||
503: ('Service Unavailable',
|
||||
'The server cannot process the request due to a high load'),
|
||||
504: ('Gateway Timeout',
|
||||
'The gateway server did not receive a timely response'),
|
||||
505: ('HTTP Version Not Supported', 'Cannot fulfill request.'),
|
||||
}
|
||||
|
||||
|
||||
def test(HandlerClass = BaseHTTPRequestHandler,
|
||||
ServerClass = HTTPServer, protocol="HTTP/1.0"):
|
||||
"""Test the HTTP request handler class.
|
||||
|
||||
This runs an HTTP server on port 8000 (or the first command line
|
||||
argument).
|
||||
|
||||
"""
|
||||
|
||||
if sys.argv[1:]:
|
||||
port = int(sys.argv[1])
|
||||
else:
|
||||
port = 8000
|
||||
server_address = ('', port)
|
||||
|
||||
HandlerClass.protocol_version = protocol
|
||||
httpd = ServerClass(server_address, HandlerClass)
|
||||
|
||||
sa = httpd.socket.getsockname()
|
||||
print "Serving HTTP on", sa[0], "port", sa[1], "..."
|
||||
httpd.serve_forever()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
test()
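
The do_<command> dispatch described in the class docstring above makes a working server a matter of one subclass. A minimal sketch, assuming Python 2 (the handler name, port, and response body are illustrative, not part of the module):

    import BaseHTTPServer

    class HelloHandler(BaseHTTPServer.BaseHTTPRequestHandler):
        def do_GET(self):
            # looked up as 'do_' + self.command by handle_one_request()
            self.send_response(200)
            self.send_header('Content-Type', 'text/plain')
            self.end_headers()
            self.wfile.write('hello\n')

    BaseHTTPServer.HTTPServer(('', 8000), HelloHandler).serve_forever()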
Bastion.py
@@ -1,180 +0,0 @@
"""Bastionification utility.
|
||||
|
||||
A bastion (for another object -- the 'original') is an object that has
|
||||
the same methods as the original but does not give access to its
|
||||
instance variables. Bastions have a number of uses, but the most
|
||||
obvious one is to provide code executing in restricted mode with a
|
||||
safe interface to an object implemented in unrestricted mode.
|
||||
|
||||
The bastionification routine has an optional second argument which is
|
||||
a filter function. Only those methods for which the filter method
|
||||
(called with the method name as argument) returns true are accessible.
|
||||
The default filter method returns true unless the method name begins
|
||||
with an underscore.
|
||||
|
||||
There are a number of possible implementations of bastions. We use a
|
||||
'lazy' approach where the bastion's __getattr__() discipline does all
|
||||
the work for a particular method the first time it is used. This is
|
||||
usually fastest, especially if the user doesn't call all available
|
||||
methods. The retrieved methods are stored as instance variables of
|
||||
the bastion, so the overhead is only occurred on the first use of each
|
||||
method.
|
||||
|
||||
Detail: the bastion class has a __repr__() discipline which includes
|
||||
the repr() of the original object. This is precomputed when the
|
||||
bastion is created.
|
||||
|
||||
"""
|
||||
from warnings import warnpy3k
|
||||
warnpy3k("the Bastion module has been removed in Python 3.0", stacklevel=2)
|
||||
del warnpy3k
|
||||
|
||||
__all__ = ["BastionClass", "Bastion"]
|
||||
|
||||
from types import MethodType
|
||||
|
||||
|
||||
class BastionClass:
|
||||
|
||||
"""Helper class used by the Bastion() function.
|
||||
|
||||
You could subclass this and pass the subclass as the bastionclass
|
||||
argument to the Bastion() function, as long as the constructor has
|
||||
the same signature (a get() function and a name for the object).
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, get, name):
|
||||
"""Constructor.
|
||||
|
||||
Arguments:
|
||||
|
||||
get - a function that gets the attribute value (by name)
|
||||
name - a human-readable name for the original object
|
||||
(suggestion: use repr(object))
|
||||
|
||||
"""
|
||||
self._get_ = get
|
||||
self._name_ = name
|
||||
|
||||
def __repr__(self):
|
||||
"""Return a representation string.
|
||||
|
||||
This includes the name passed in to the constructor, so that
|
||||
if you print the bastion during debugging, at least you have
|
||||
some idea of what it is.
|
||||
|
||||
"""
|
||||
return "<Bastion for %s>" % self._name_
|
||||
|
||||
def __getattr__(self, name):
|
||||
"""Get an as-yet undefined attribute value.
|
||||
|
||||
This calls the get() function that was passed to the
|
||||
constructor. The result is stored as an instance variable so
|
||||
that the next time the same attribute is requested,
|
||||
__getattr__() won't be invoked.
|
||||
|
||||
If the get() function raises an exception, this is simply
|
||||
passed on -- exceptions are not cached.
|
||||
|
||||
"""
|
||||
attribute = self._get_(name)
|
||||
self.__dict__[name] = attribute
|
||||
return attribute
|
||||
|
||||
|
||||
def Bastion(object, filter = lambda name: name[:1] != '_',
|
||||
name=None, bastionclass=BastionClass):
|
||||
"""Create a bastion for an object, using an optional filter.
|
||||
|
||||
See the Bastion module's documentation for background.
|
||||
|
||||
Arguments:
|
||||
|
||||
object - the original object
|
||||
filter - a predicate that decides whether a function name is OK;
|
||||
by default all names are OK that don't start with '_'
|
||||
name - the name of the object; default repr(object)
|
||||
bastionclass - class used to create the bastion; default BastionClass
|
||||
|
||||
"""
|
||||
|
||||
raise RuntimeError, "This code is not secure in Python 2.2 and later"
|
||||
|
||||
# Note: we define *two* ad-hoc functions here, get1 and get2.
|
||||
# Both are intended to be called in the same way: get(name).
|
||||
# It is clear that the real work (getting the attribute
|
||||
# from the object and calling the filter) is done in get1.
|
||||
# Why can't we pass get1 to the bastion? Because the user
|
||||
# would be able to override the filter argument! With get2,
|
||||
# overriding the default argument is no security loophole:
|
||||
# all it does is call it.
|
||||
# Also notice that we can't place the object and filter as
|
||||
# instance variables on the bastion object itself, since
|
||||
# the user has full access to all instance variables!
|
||||
|
||||
def get1(name, object=object, filter=filter):
|
||||
"""Internal function for Bastion(). See source comments."""
|
||||
if filter(name):
|
||||
attribute = getattr(object, name)
|
||||
if type(attribute) == MethodType:
|
||||
return attribute
|
||||
raise AttributeError, name
|
||||
|
||||
def get2(name, get1=get1):
|
||||
"""Internal function for Bastion(). See source comments."""
|
||||
return get1(name)
|
||||
|
||||
if name is None:
|
||||
name = repr(object)
|
||||
return bastionclass(get2, name)
|
||||
|
||||
|
||||
def _test():
|
||||
"""Test the Bastion() function."""
|
||||
class Original:
|
||||
def __init__(self):
|
||||
self.sum = 0
|
||||
def add(self, n):
|
||||
self._add(n)
|
||||
def _add(self, n):
|
||||
self.sum = self.sum + n
|
||||
def total(self):
|
||||
return self.sum
|
||||
o = Original()
|
||||
b = Bastion(o)
|
||||
testcode = """if 1:
|
||||
b.add(81)
|
||||
b.add(18)
|
||||
print "b.total() =", b.total()
|
||||
try:
|
||||
print "b.sum =", b.sum,
|
||||
except:
|
||||
print "inaccessible"
|
||||
else:
|
||||
print "accessible"
|
||||
try:
|
||||
print "b._add =", b._add,
|
||||
except:
|
||||
print "inaccessible"
|
||||
else:
|
||||
print "accessible"
|
||||
try:
|
||||
print "b._get_.func_defaults =", map(type, b._get_.func_defaults),
|
||||
except:
|
||||
print "inaccessible"
|
||||
else:
|
||||
print "accessible"
|
||||
\n"""
|
||||
exec testcode
|
||||
print '='*20, "Using rexec:", '='*20
|
||||
import rexec
|
||||
r = rexec.RExec()
|
||||
m = r.add_module('__main__')
|
||||
m.b = b
|
||||
r.r_exec(testcode)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
_test()
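
The 'lazy' approach the module docstring describes -- fetch on first use, then cache as an instance attribute so __getattr__ is never consulted again for that name -- is the heart of BastionClass. A sketch of the same pattern in isolation (the class name is illustrative):

    class LazyProxy:
        def __init__(self, get):
            self._get_ = get

        def __getattr__(self, name):
            # only reached on a cache miss; exceptions from get() are not cached
            value = self._get_(name)
            self.__dict__[name] = value
            return value

After the first access the value lives in __dict__, so later lookups bypass __getattr__ entirely -- which is also why Bastion() cannot hide the object and filter in instance variables: the caller has full access to those.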
CGIHTTPServer.py
@@ -1,377 +0,0 @@
"""CGI-savvy HTTP Server.
|
||||
|
||||
This module builds on SimpleHTTPServer by implementing GET and POST
|
||||
requests to cgi-bin scripts.
|
||||
|
||||
If the os.fork() function is not present (e.g. on Windows),
|
||||
os.popen2() is used as a fallback, with slightly altered semantics; if
|
||||
that function is not present either (e.g. on Macintosh), only Python
|
||||
scripts are supported, and they are executed by the current process.
|
||||
|
||||
In all cases, the implementation is intentionally naive -- all
|
||||
requests are executed sychronously.
|
||||
|
||||
SECURITY WARNING: DON'T USE THIS CODE UNLESS YOU ARE INSIDE A FIREWALL
|
||||
-- it may execute arbitrary Python code or external programs.
|
||||
|
||||
Note that status code 200 is sent prior to execution of a CGI script, so
|
||||
scripts cannot send other status codes such as 302 (redirect).
|
||||
"""
|
||||
|
||||
|
||||
__version__ = "0.4"
|
||||
|
||||
__all__ = ["CGIHTTPRequestHandler"]
|
||||
|
||||
import os
|
||||
import sys
|
||||
import urllib
|
||||
import BaseHTTPServer
|
||||
import SimpleHTTPServer
|
||||
import select
|
||||
import copy
|
||||
|
||||
|
||||
class CGIHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
|
||||
|
||||
"""Complete HTTP server with GET, HEAD and POST commands.
|
||||
|
||||
GET and HEAD also support running CGI scripts.
|
||||
|
||||
The POST command is *only* implemented for CGI scripts.
|
||||
|
||||
"""
|
||||
|
||||
# Determine platform specifics
|
||||
have_fork = hasattr(os, 'fork')
|
||||
have_popen2 = hasattr(os, 'popen2')
|
||||
have_popen3 = hasattr(os, 'popen3')
|
||||
|
||||
# Make rfile unbuffered -- we need to read one line and then pass
|
||||
# the rest to a subprocess, so we can't use buffered input.
|
||||
rbufsize = 0
|
||||
|
||||
def do_POST(self):
|
||||
"""Serve a POST request.
|
||||
|
||||
This is only implemented for CGI scripts.
|
||||
|
||||
"""
|
||||
|
||||
if self.is_cgi():
|
||||
self.run_cgi()
|
||||
else:
|
||||
self.send_error(501, "Can only POST to CGI scripts")
|
||||
|
||||
def send_head(self):
|
||||
"""Version of send_head that support CGI scripts"""
|
||||
if self.is_cgi():
|
||||
return self.run_cgi()
|
||||
else:
|
||||
return SimpleHTTPServer.SimpleHTTPRequestHandler.send_head(self)
|
||||
|
||||
def is_cgi(self):
|
||||
"""Test whether self.path corresponds to a CGI script.
|
||||
|
||||
Returns True and updates the cgi_info attribute to the tuple
|
||||
(dir, rest) if self.path requires running a CGI script.
|
||||
Returns False otherwise.
|
||||
|
||||
If any exception is raised, the caller should assume that
|
||||
self.path was rejected as invalid and act accordingly.
|
||||
|
||||
The default implementation tests whether the normalized url
|
||||
path begins with one of the strings in self.cgi_directories
|
||||
(and the next character is a '/' or the end of the string).
|
||||
"""
|
||||
collapsed_path = _url_collapse_path(self.path)
|
||||
dir_sep = collapsed_path.find('/', 1)
|
||||
head, tail = collapsed_path[:dir_sep], collapsed_path[dir_sep+1:]
|
||||
if head in self.cgi_directories:
|
||||
self.cgi_info = head, tail
|
||||
return True
|
||||
return False
|
||||
|
||||
cgi_directories = ['/cgi-bin', '/htbin']
|
||||
|
||||
def is_executable(self, path):
|
||||
"""Test whether argument path is an executable file."""
|
||||
return executable(path)
|
||||
|
||||
def is_python(self, path):
|
||||
"""Test whether argument path is a Python script."""
|
||||
head, tail = os.path.splitext(path)
|
||||
return tail.lower() in (".py", ".pyw")
|
||||
|
||||
def run_cgi(self):
|
||||
"""Execute a CGI script."""
|
||||
dir, rest = self.cgi_info
|
||||
|
||||
i = rest.find('/')
|
||||
while i >= 0:
|
||||
nextdir = rest[:i]
|
||||
nextrest = rest[i+1:]
|
||||
|
||||
scriptdir = self.translate_path(nextdir)
|
||||
if os.path.isdir(scriptdir):
|
||||
dir, rest = nextdir, nextrest
|
||||
i = rest.find('/')
|
||||
else:
|
||||
break
|
||||
|
||||
# find an explicit query string, if present.
|
||||
i = rest.rfind('?')
|
||||
if i >= 0:
|
||||
rest, query = rest[:i], rest[i+1:]
|
||||
else:
|
||||
query = ''
|
||||
|
||||
# dissect the part after the directory name into a script name &
|
||||
# a possible additional path, to be stored in PATH_INFO.
|
||||
i = rest.find('/')
|
||||
if i >= 0:
|
||||
script, rest = rest[:i], rest[i:]
|
||||
else:
|
||||
script, rest = rest, ''
|
||||
|
||||
scriptname = dir + '/' + script
|
||||
scriptfile = self.translate_path(scriptname)
|
||||
if not os.path.exists(scriptfile):
|
||||
self.send_error(404, "No such CGI script (%r)" % scriptname)
|
||||
return
|
||||
if not os.path.isfile(scriptfile):
|
||||
self.send_error(403, "CGI script is not a plain file (%r)" %
|
||||
scriptname)
|
||||
return
|
||||
ispy = self.is_python(scriptname)
|
||||
if not ispy:
|
||||
if not (self.have_fork or self.have_popen2 or self.have_popen3):
|
||||
self.send_error(403, "CGI script is not a Python script (%r)" %
|
||||
scriptname)
|
||||
return
|
||||
if not self.is_executable(scriptfile):
|
||||
self.send_error(403, "CGI script is not executable (%r)" %
|
||||
scriptname)
|
||||
return
|
||||
|
||||
# Reference: http://hoohoo.ncsa.uiuc.edu/cgi/env.html
|
||||
# XXX Much of the following could be prepared ahead of time!
|
||||
env = copy.deepcopy(os.environ)
|
||||
env['SERVER_SOFTWARE'] = self.version_string()
|
||||
env['SERVER_NAME'] = self.server.server_name
|
||||
env['GATEWAY_INTERFACE'] = 'CGI/1.1'
|
||||
env['SERVER_PROTOCOL'] = self.protocol_version
|
||||
env['SERVER_PORT'] = str(self.server.server_port)
|
||||
env['REQUEST_METHOD'] = self.command
|
||||
uqrest = urllib.unquote(rest)
|
||||
env['PATH_INFO'] = uqrest
|
||||
env['PATH_TRANSLATED'] = self.translate_path(uqrest)
|
||||
env['SCRIPT_NAME'] = scriptname
|
||||
if query:
|
||||
env['QUERY_STRING'] = query
|
||||
host = self.address_string()
|
||||
if host != self.client_address[0]:
|
||||
env['REMOTE_HOST'] = host
|
||||
env['REMOTE_ADDR'] = self.client_address[0]
|
||||
authorization = self.headers.getheader("authorization")
|
||||
if authorization:
|
||||
authorization = authorization.split()
|
||||
if len(authorization) == 2:
|
||||
import base64, binascii
|
||||
env['AUTH_TYPE'] = authorization[0]
|
||||
if authorization[0].lower() == "basic":
|
||||
try:
|
||||
authorization = base64.decodestring(authorization[1])
|
||||
except binascii.Error:
|
||||
pass
|
||||
else:
|
||||
authorization = authorization.split(':')
|
||||
if len(authorization) == 2:
|
||||
env['REMOTE_USER'] = authorization[0]
|
||||
# XXX REMOTE_IDENT
|
||||
if self.headers.typeheader is None:
|
||||
env['CONTENT_TYPE'] = self.headers.type
|
||||
else:
|
||||
env['CONTENT_TYPE'] = self.headers.typeheader
|
||||
length = self.headers.getheader('content-length')
|
||||
if length:
|
||||
env['CONTENT_LENGTH'] = length
|
||||
referer = self.headers.getheader('referer')
|
||||
if referer:
|
||||
env['HTTP_REFERER'] = referer
|
||||
accept = []
|
||||
for line in self.headers.getallmatchingheaders('accept'):
|
||||
if line[:1] in "\t\n\r ":
|
||||
accept.append(line.strip())
|
||||
else:
|
||||
accept = accept + line[7:].split(',')
|
||||
env['HTTP_ACCEPT'] = ','.join(accept)
|
||||
ua = self.headers.getheader('user-agent')
|
||||
if ua:
|
||||
env['HTTP_USER_AGENT'] = ua
|
||||
co = filter(None, self.headers.getheaders('cookie'))
|
||||
if co:
|
||||
env['HTTP_COOKIE'] = ', '.join(co)
|
||||
# XXX Other HTTP_* headers
|
||||
# Since we're setting the env in the parent, provide empty
|
||||
# values to override previously set values
|
||||
for k in ('QUERY_STRING', 'REMOTE_HOST', 'CONTENT_LENGTH',
|
||||
'HTTP_USER_AGENT', 'HTTP_COOKIE', 'HTTP_REFERER'):
|
||||
env.setdefault(k, "")
|
||||
|
||||
self.send_response(200, "Script output follows")
|
||||
|
||||
decoded_query = query.replace('+', ' ')
|
||||
|
||||
if self.have_fork:
|
||||
# Unix -- fork as we should
|
||||
args = [script]
|
||||
if '=' not in decoded_query:
|
||||
args.append(decoded_query)
|
||||
nobody = nobody_uid()
|
||||
self.wfile.flush() # Always flush before forking
|
||||
pid = os.fork()
|
||||
if pid != 0:
|
||||
# Parent
|
||||
pid, sts = os.waitpid(pid, 0)
|
||||
# throw away additional data [see bug #427345]
|
||||
while select.select([self.rfile], [], [], 0)[0]:
|
||||
if not self.rfile.read(1):
|
||||
break
|
||||
if sts:
|
||||
self.log_error("CGI script exit status %#x", sts)
|
||||
return
|
||||
# Child
|
||||
try:
|
||||
try:
|
||||
os.setuid(nobody)
|
||||
except os.error:
|
||||
pass
|
||||
os.dup2(self.rfile.fileno(), 0)
|
||||
os.dup2(self.wfile.fileno(), 1)
|
||||
os.execve(scriptfile, args, env)
|
||||
except:
|
||||
self.server.handle_error(self.request, self.client_address)
|
||||
os._exit(127)
|
||||
|
||||
else:
|
||||
# Non Unix - use subprocess
|
||||
import subprocess
|
||||
cmdline = [scriptfile]
|
||||
if self.is_python(scriptfile):
|
||||
interp = sys.executable
|
||||
if interp.lower().endswith("w.exe"):
|
||||
# On Windows, use python.exe, not pythonw.exe
|
||||
interp = interp[:-5] + interp[-4:]
|
||||
cmdline = [interp, '-u'] + cmdline
|
||||
if '=' not in query:
|
||||
cmdline.append(query)
|
||||
|
||||
self.log_message("command: %s", subprocess.list2cmdline(cmdline))
|
||||
try:
|
||||
nbytes = int(length)
|
||||
except (TypeError, ValueError):
|
||||
nbytes = 0
|
||||
p = subprocess.Popen(cmdline,
|
||||
stdin = subprocess.PIPE,
|
||||
stdout = subprocess.PIPE,
|
||||
stderr = subprocess.PIPE,
|
||||
env = env
|
||||
)
|
||||
if self.command.lower() == "post" and nbytes > 0:
|
||||
data = self.rfile.read(nbytes)
|
||||
else:
|
||||
data = None
|
||||
# throw away additional data [see bug #427345]
|
||||
while select.select([self.rfile._sock], [], [], 0)[0]:
|
||||
if not self.rfile._sock.recv(1):
|
||||
break
|
||||
stdout, stderr = p.communicate(data)
|
||||
self.wfile.write(stdout)
|
||||
if stderr:
|
||||
self.log_error('%s', stderr)
|
||||
p.stderr.close()
|
||||
p.stdout.close()
|
||||
status = p.returncode
|
||||
if status:
|
||||
self.log_error("CGI script exit status %#x", status)
|
||||
else:
|
||||
self.log_message("CGI script exited OK")
|
||||
|
||||
|
||||
def _url_collapse_path(path):
|
||||
"""
|
||||
Given a URL path, remove extra '/'s and '.' path elements and collapse
|
||||
any '..' references and returns a colllapsed path.
|
||||
|
||||
Implements something akin to RFC-2396 5.2 step 6 to parse relative paths.
|
||||
The utility of this function is limited to is_cgi method and helps
|
||||
preventing some security attacks.
|
||||
|
||||
Returns: A tuple of (head, tail) where tail is everything after the final /
|
||||
and head is everything before it. Head will always start with a '/' and,
|
||||
if it contains anything else, never have a trailing '/'.
|
||||
|
||||
Raises: IndexError if too many '..' occur within the path.
|
||||
|
||||
"""
|
||||
# Similar to os.path.split(os.path.normpath(path)) but specific to URL
|
||||
# path semantics rather than local operating system semantics.
|
||||
path_parts = path.split('/')
|
||||
head_parts = []
|
||||
for part in path_parts[:-1]:
|
||||
if part == '..':
|
||||
head_parts.pop() # IndexError if more '..' than prior parts
|
||||
elif part and part != '.':
|
||||
head_parts.append( part )
|
||||
if path_parts:
|
||||
tail_part = path_parts.pop()
|
||||
if tail_part:
|
||||
if tail_part == '..':
|
||||
head_parts.pop()
|
||||
tail_part = ''
|
||||
elif tail_part == '.':
|
||||
tail_part = ''
|
||||
else:
|
||||
tail_part = ''
|
||||
|
||||
splitpath = ('/' + '/'.join(head_parts), tail_part)
|
||||
collapsed_path = "/".join(splitpath)
|
||||
|
||||
return collapsed_path
|
||||
|
||||
|
||||
nobody = None
|
||||
|
||||
def nobody_uid():
|
||||
"""Internal routine to get nobody's uid"""
|
||||
global nobody
|
||||
if nobody:
|
||||
return nobody
|
||||
try:
|
||||
import pwd
|
||||
except ImportError:
|
||||
return -1
|
||||
try:
|
||||
nobody = pwd.getpwnam('nobody')[2]
|
||||
except KeyError:
|
||||
nobody = 1 + max(map(lambda x: x[2], pwd.getpwall()))
|
||||
return nobody
|
||||
|
||||
|
||||
def executable(path):
|
||||
"""Test for executable file."""
|
||||
try:
|
||||
st = os.stat(path)
|
||||
except os.error:
|
||||
return False
|
||||
return st.st_mode & 0111 != 0
|
||||
|
||||
|
||||
def test(HandlerClass = CGIHTTPRequestHandler,
|
||||
ServerClass = BaseHTTPServer.HTTPServer):
|
||||
SimpleHTTPServer.test(HandlerClass, ServerClass)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
test()
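
The collapsing behaviour that is_cgi() relies on, shown on two illustrative paths; the second walks '..' above the root, so head_parts.pop() raises the IndexError the docstring mentions:

    >>> _url_collapse_path('/cgi-bin/../cgi-bin/./test.py')
    '/cgi-bin/test.py'
    >>> _url_collapse_path('/../etc/passwd')
    Traceback (most recent call last):
      ...
    IndexError: pop from empty list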
ConfigParser.py
@@ -1,753 +0,0 @@
"""Configuration file parser.
|
||||
|
||||
A setup file consists of sections, lead by a "[section]" header,
|
||||
and followed by "name: value" entries, with continuations and such in
|
||||
the style of RFC 822.
|
||||
|
||||
The option values can contain format strings which refer to other values in
|
||||
the same section, or values in a special [DEFAULT] section.
|
||||
|
||||
For example:
|
||||
|
||||
something: %(dir)s/whatever
|
||||
|
||||
would resolve the "%(dir)s" to the value of dir. All reference
|
||||
expansions are done late, on demand.
|
||||
|
||||
Intrinsic defaults can be specified by passing them into the
|
||||
ConfigParser constructor as a dictionary.
|
||||
|
||||
class:
|
||||
|
||||
ConfigParser -- responsible for parsing a list of
|
||||
configuration files, and managing the parsed database.
|
||||
|
||||
methods:
|
||||
|
||||
__init__(defaults=None)
|
||||
create the parser and specify a dictionary of intrinsic defaults. The
|
||||
keys must be strings, the values must be appropriate for %()s string
|
||||
interpolation. Note that `__name__' is always an intrinsic default;
|
||||
its value is the section's name.
|
||||
|
||||
sections()
|
||||
return all the configuration section names, sans DEFAULT
|
||||
|
||||
has_section(section)
|
||||
return whether the given section exists
|
||||
|
||||
has_option(section, option)
|
||||
return whether the given option exists in the given section
|
||||
|
||||
options(section)
|
||||
return list of configuration options for the named section
|
||||
|
||||
read(filenames)
|
||||
read and parse the list of named configuration files, given by
|
||||
name. A single filename is also allowed. Non-existing files
|
||||
are ignored. Return list of successfully read files.
|
||||
|
||||
readfp(fp, filename=None)
|
||||
read and parse one configuration file, given as a file object.
|
||||
The filename defaults to fp.name; it is only used in error
|
||||
messages (if fp has no `name' attribute, the string `<???>' is used).
|
||||
|
||||
get(section, option, raw=False, vars=None)
|
||||
return a string value for the named option. All % interpolations are
|
||||
expanded in the return values, based on the defaults passed into the
|
||||
constructor and the DEFAULT section. Additional substitutions may be
|
||||
provided using the `vars' argument, which must be a dictionary whose
|
||||
contents override any pre-existing defaults.
|
||||
|
||||
getint(section, options)
|
||||
like get(), but convert value to an integer
|
||||
|
||||
getfloat(section, options)
|
||||
like get(), but convert value to a float
|
||||
|
||||
getboolean(section, options)
|
||||
like get(), but convert value to a boolean (currently case
|
||||
insensitively defined as 0, false, no, off for False, and 1, true,
|
||||
yes, on for True). Returns False or True.
|
||||
|
||||
items(section, raw=False, vars=None)
|
||||
return a list of tuples with (name, value) for each option
|
||||
in the section.
|
||||
|
||||
remove_section(section)
|
||||
remove the given file section and all its options
|
||||
|
||||
remove_option(section, option)
|
||||
remove the given option from the given section
|
||||
|
||||
set(section, option, value)
|
||||
set the given option
|
||||
|
||||
write(fp)
|
||||
write the configuration state in .ini format
|
||||
"""
|
||||
|
||||
try:
|
||||
from collections import OrderedDict as _default_dict
|
||||
except ImportError:
|
||||
# fallback for setup.py which hasn't yet built _collections
|
||||
_default_dict = dict
|
||||
|
||||
import re
|
||||
|
||||
__all__ = ["NoSectionError", "DuplicateSectionError", "NoOptionError",
|
||||
"InterpolationError", "InterpolationDepthError",
|
||||
"InterpolationSyntaxError", "ParsingError",
|
||||
"MissingSectionHeaderError",
|
||||
"ConfigParser", "SafeConfigParser", "RawConfigParser",
|
||||
"DEFAULTSECT", "MAX_INTERPOLATION_DEPTH"]
|
||||
|
||||
DEFAULTSECT = "DEFAULT"
|
||||
|
||||
MAX_INTERPOLATION_DEPTH = 10
|
||||
|
||||
|
||||
|
||||
# exception classes
|
||||
class Error(Exception):
|
||||
"""Base class for ConfigParser exceptions."""
|
||||
|
||||
def _get_message(self):
|
||||
"""Getter for 'message'; needed only to override deprecation in
|
||||
BaseException."""
|
||||
return self.__message
|
||||
|
||||
def _set_message(self, value):
|
||||
"""Setter for 'message'; needed only to override deprecation in
|
||||
BaseException."""
|
||||
self.__message = value
|
||||
|
||||
# BaseException.message has been deprecated since Python 2.6. To prevent
|
||||
# DeprecationWarning from popping up over this pre-existing attribute, use
|
||||
# a new property that takes lookup precedence.
|
||||
message = property(_get_message, _set_message)
|
||||
|
||||
def __init__(self, msg=''):
|
||||
self.message = msg
|
||||
Exception.__init__(self, msg)
|
||||
|
||||
def __repr__(self):
|
||||
return self.message
|
||||
|
||||
__str__ = __repr__
|
||||
|
||||
class NoSectionError(Error):
|
||||
"""Raised when no section matches a requested option."""
|
||||
|
||||
def __init__(self, section):
|
||||
Error.__init__(self, 'No section: %r' % (section,))
|
||||
self.section = section
|
||||
self.args = (section, )
|
||||
|
||||
class DuplicateSectionError(Error):
|
||||
"""Raised when a section is multiply-created."""
|
||||
|
||||
def __init__(self, section):
|
||||
Error.__init__(self, "Section %r already exists" % section)
|
||||
self.section = section
|
||||
self.args = (section, )
|
||||
|
||||
class NoOptionError(Error):
|
||||
"""A requested option was not found."""
|
||||
|
||||
def __init__(self, option, section):
|
||||
Error.__init__(self, "No option %r in section: %r" %
|
||||
(option, section))
|
||||
self.option = option
|
||||
self.section = section
|
||||
self.args = (option, section)
|
||||
|
||||
class InterpolationError(Error):
|
||||
"""Base class for interpolation-related exceptions."""
|
||||
|
||||
def __init__(self, option, section, msg):
|
||||
Error.__init__(self, msg)
|
||||
self.option = option
|
||||
self.section = section
|
||||
self.args = (option, section, msg)
|
||||
|
||||
class InterpolationMissingOptionError(InterpolationError):
|
||||
"""A string substitution required a setting which was not available."""
|
||||
|
||||
def __init__(self, option, section, rawval, reference):
|
||||
msg = ("Bad value substitution:\n"
|
||||
"\tsection: [%s]\n"
|
||||
"\toption : %s\n"
|
||||
"\tkey : %s\n"
|
||||
"\trawval : %s\n"
|
||||
% (section, option, reference, rawval))
|
||||
InterpolationError.__init__(self, option, section, msg)
|
||||
self.reference = reference
|
||||
self.args = (option, section, rawval, reference)
|
||||
|
||||
class InterpolationSyntaxError(InterpolationError):
|
||||
"""Raised when the source text into which substitutions are made
|
||||
does not conform to the required syntax."""
|
||||
|
||||
class InterpolationDepthError(InterpolationError):
|
||||
"""Raised when substitutions are nested too deeply."""
|
||||
|
||||
def __init__(self, option, section, rawval):
|
||||
msg = ("Value interpolation too deeply recursive:\n"
|
||||
"\tsection: [%s]\n"
|
||||
"\toption : %s\n"
|
||||
"\trawval : %s\n"
|
||||
% (section, option, rawval))
|
||||
InterpolationError.__init__(self, option, section, msg)
|
||||
self.args = (option, section, rawval)
|
||||
|
||||
class ParsingError(Error):
|
||||
"""Raised when a configuration file does not follow legal syntax."""
|
||||
|
||||
def __init__(self, filename):
|
||||
Error.__init__(self, 'File contains parsing errors: %s' % filename)
|
||||
self.filename = filename
|
||||
self.errors = []
|
||||
self.args = (filename, )
|
||||
|
||||
def append(self, lineno, line):
|
||||
self.errors.append((lineno, line))
|
||||
self.message += '\n\t[line %2d]: %s' % (lineno, line)
|
||||
|
||||
class MissingSectionHeaderError(ParsingError):
|
||||
"""Raised when a key-value pair is found before any section header."""
|
||||
|
||||
def __init__(self, filename, lineno, line):
|
||||
Error.__init__(
|
||||
self,
|
||||
'File contains no section headers.\nfile: %s, line: %d\n%r' %
|
||||
(filename, lineno, line))
|
||||
self.filename = filename
|
||||
self.lineno = lineno
|
||||
self.line = line
|
||||
self.args = (filename, lineno, line)
|
||||
|
||||
|
||||
class RawConfigParser:
|
||||
def __init__(self, defaults=None, dict_type=_default_dict,
|
||||
allow_no_value=False):
|
||||
self._dict = dict_type
|
||||
self._sections = self._dict()
|
||||
self._defaults = self._dict()
|
||||
if allow_no_value:
|
||||
self._optcre = self.OPTCRE_NV
|
||||
else:
|
||||
self._optcre = self.OPTCRE
|
||||
if defaults:
|
||||
for key, value in defaults.items():
|
||||
self._defaults[self.optionxform(key)] = value
|
||||
|
||||
def defaults(self):
|
||||
return self._defaults
|
||||
|
||||
def sections(self):
|
||||
"""Return a list of section names, excluding [DEFAULT]"""
|
||||
# self._sections will never have [DEFAULT] in it
|
||||
return self._sections.keys()
|
||||
|
||||
def add_section(self, section):
|
||||
"""Create a new section in the configuration.
|
||||
|
||||
Raise DuplicateSectionError if a section by the specified name
|
||||
already exists. Raise ValueError if name is DEFAULT or any of it's
|
||||
case-insensitive variants.
|
||||
"""
|
||||
if section.lower() == "default":
|
||||
raise ValueError, 'Invalid section name: %s' % section
|
||||
|
||||
if section in self._sections:
|
||||
raise DuplicateSectionError(section)
|
||||
self._sections[section] = self._dict()
|
||||
|
||||
def has_section(self, section):
|
||||
"""Indicate whether the named section is present in the configuration.
|
||||
|
||||
The DEFAULT section is not acknowledged.
|
||||
"""
|
||||
return section in self._sections
|
||||
|
||||
def options(self, section):
|
||||
"""Return a list of option names for the given section name."""
|
||||
try:
|
||||
opts = self._sections[section].copy()
|
||||
except KeyError:
|
||||
raise NoSectionError(section)
|
||||
opts.update(self._defaults)
|
||||
if '__name__' in opts:
|
||||
del opts['__name__']
|
||||
return opts.keys()
|
||||
|
||||
def read(self, filenames):
|
||||
"""Read and parse a filename or a list of filenames.
|
||||
|
||||
Files that cannot be opened are silently ignored; this is
|
||||
designed so that you can specify a list of potential
|
||||
configuration file locations (e.g. current directory, user's
|
||||
home directory, systemwide directory), and all existing
|
||||
configuration files in the list will be read. A single
|
||||
filename may also be given.
|
||||
|
||||
Return list of successfully read files.
|
||||
"""
|
||||
if isinstance(filenames, basestring):
|
||||
filenames = [filenames]
|
||||
read_ok = []
|
||||
for filename in filenames:
|
||||
try:
|
||||
fp = open(filename)
|
||||
except IOError:
|
||||
continue
|
||||
self._read(fp, filename)
|
||||
fp.close()
|
||||
read_ok.append(filename)
|
||||
return read_ok
|
||||
|
||||
def readfp(self, fp, filename=None):
|
||||
"""Like read() but the argument must be a file-like object.
|
||||
|
||||
The `fp' argument must have a `readline' method. Optional
|
||||
second argument is the `filename', which if not given, is
|
||||
taken from fp.name. If fp has no `name' attribute, `<???>' is
|
||||
used.
|
||||
|
||||
"""
|
||||
if filename is None:
|
||||
try:
|
||||
filename = fp.name
|
||||
except AttributeError:
|
||||
filename = '<???>'
|
||||
self._read(fp, filename)
|
||||
|
||||
def get(self, section, option):
|
||||
opt = self.optionxform(option)
|
||||
if section not in self._sections:
|
||||
if section != DEFAULTSECT:
|
||||
raise NoSectionError(section)
|
||||
            if opt in self._defaults:
                return self._defaults[opt]
            else:
                raise NoOptionError(option, section)
        elif opt in self._sections[section]:
            return self._sections[section][opt]
        elif opt in self._defaults:
            return self._defaults[opt]
        else:
            raise NoOptionError(option, section)

    def items(self, section):
        try:
            d2 = self._sections[section]
        except KeyError:
            if section != DEFAULTSECT:
                raise NoSectionError(section)
            d2 = self._dict()
        d = self._defaults.copy()
        d.update(d2)
        if "__name__" in d:
            del d["__name__"]
        return d.items()

    def _get(self, section, conv, option):
        return conv(self.get(section, option))

    def getint(self, section, option):
        return self._get(section, int, option)

    def getfloat(self, section, option):
        return self._get(section, float, option)

    _boolean_states = {'1': True, 'yes': True, 'true': True, 'on': True,
                       '0': False, 'no': False, 'false': False, 'off': False}

    def getboolean(self, section, option):
        v = self.get(section, option)
        if v.lower() not in self._boolean_states:
            raise ValueError, 'Not a boolean: %s' % v
        return self._boolean_states[v.lower()]

    def optionxform(self, optionstr):
        return optionstr.lower()

    def has_option(self, section, option):
        """Check for the existence of a given option in a given section."""
        if not section or section == DEFAULTSECT:
            option = self.optionxform(option)
            return option in self._defaults
        elif section not in self._sections:
            return False
        else:
            option = self.optionxform(option)
            return (option in self._sections[section]
                    or option in self._defaults)

    def set(self, section, option, value=None):
        """Set an option."""
        if not section or section == DEFAULTSECT:
            sectdict = self._defaults
        else:
            try:
                sectdict = self._sections[section]
            except KeyError:
                raise NoSectionError(section)
        sectdict[self.optionxform(option)] = value

    def write(self, fp):
        """Write an .ini-format representation of the configuration state."""
        if self._defaults:
            fp.write("[%s]\n" % DEFAULTSECT)
            for (key, value) in self._defaults.items():
                fp.write("%s = %s\n" % (key, str(value).replace('\n', '\n\t')))
            fp.write("\n")
        for section in self._sections:
            fp.write("[%s]\n" % section)
            for (key, value) in self._sections[section].items():
                if key == "__name__":
                    continue
                if (value is not None) or (self._optcre == self.OPTCRE):
                    key = " = ".join((key, str(value).replace('\n', '\n\t')))
                fp.write("%s\n" % (key))
            fp.write("\n")

    def remove_option(self, section, option):
        """Remove an option."""
        if not section or section == DEFAULTSECT:
            sectdict = self._defaults
        else:
            try:
                sectdict = self._sections[section]
            except KeyError:
                raise NoSectionError(section)
        option = self.optionxform(option)
        existed = option in sectdict
        if existed:
            del sectdict[option]
        return existed

    def remove_section(self, section):
        """Remove a file section."""
        existed = section in self._sections
        if existed:
            del self._sections[section]
        return existed

    #
    # Regular expressions for parsing section headers and options.
    #
    SECTCRE = re.compile(
        r'\['                                 # [
        r'(?P<header>[^]]+)'                  # very permissive!
        r'\]'                                 # ]
        )
    OPTCRE = re.compile(
        r'(?P<option>[^:=\s][^:=]*)'          # very permissive!
        r'\s*(?P<vi>[:=])\s*'                 # any number of space/tab,
                                              # followed by separator
                                              # (either : or =), followed
                                              # by any space/tab
        r'(?P<value>.*)$'                     # everything up to eol
        )
    OPTCRE_NV = re.compile(
        r'(?P<option>[^:=\s][^:=]*)'          # very permissive!
        r'\s*(?:'                             # any number of space/tab,
        r'(?P<vi>[:=])\s*'                    # optionally followed by
                                              # separator (either : or
                                              # =), followed by any
                                              # space/tab
        r'(?P<value>.*))?$'                   # everything up to eol
        )
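A quick illustration (not part of the original file; the sample line and values are made up) of how OPTCRE carves an option line into its named groups:

    mo = RawConfigParser.OPTCRE.match("timeout : 45  ; seconds\n")
    # 'option' keeps its trailing whitespace and 'value' keeps the comment;
    # _read() rstrips the name and strips " ; seconds" before storing.
    assert mo.group('option', 'vi', 'value') == ('timeout ', ':', '45  ; seconds')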

    def _read(self, fp, fpname):
        """Parse a sectioned setup file.

        Each section in the setup file starts with a title line,
        indicated by a name in square brackets (`[]'), followed by
        key/value option lines in `name: value' format.
        Continuations are represented by an embedded newline then
        leading whitespace.  Blank lines, lines beginning with a '#',
        and just about everything else are ignored.
        """
        cursect = None                        # None, or a dictionary
        optname = None
        lineno = 0
        e = None                              # None, or an exception
        while True:
            line = fp.readline()
            if not line:
                break
            lineno = lineno + 1
            # comment or blank line?
            if line.strip() == '' or line[0] in '#;':
                continue
            if line.split(None, 1)[0].lower() == 'rem' and line[0] in "rR":
                # no leading whitespace
                continue
            # continuation line?
            if line[0].isspace() and cursect is not None and optname:
                value = line.strip()
                if value:
                    cursect[optname].append(value)
            # a section header or option header?
            else:
                # is it a section header?
                mo = self.SECTCRE.match(line)
                if mo:
                    sectname = mo.group('header')
                    if sectname in self._sections:
                        cursect = self._sections[sectname]
                    elif sectname == DEFAULTSECT:
                        cursect = self._defaults
                    else:
                        cursect = self._dict()
                        cursect['__name__'] = sectname
                        self._sections[sectname] = cursect
                    # So sections can't start with a continuation line
                    optname = None
                # no section header in the file?
                elif cursect is None:
                    raise MissingSectionHeaderError(fpname, lineno, line)
                # an option line?
                else:
                    mo = self._optcre.match(line)
                    if mo:
                        optname, vi, optval = mo.group('option', 'vi', 'value')
                        optname = self.optionxform(optname.rstrip())
                        # This check is fine because the OPTCRE cannot
                        # match if it would set optval to None
                        if optval is not None:
                            if vi in ('=', ':') and ';' in optval:
                                # ';' is a comment delimiter only if it follows
                                # a spacing character
                                pos = optval.find(';')
                                if pos != -1 and optval[pos-1].isspace():
                                    optval = optval[:pos]
                            optval = optval.strip()
                            # allow empty values
                            if optval == '""':
                                optval = ''
                            cursect[optname] = [optval]
                        else:
                            # valueless option handling
                            cursect[optname] = optval
                    else:
                        # a non-fatal parsing error occurred.  set up the
                        # exception but keep going.  the exception will be
                        # raised at the end of the file and will contain a
                        # list of all bogus lines
                        if not e:
                            e = ParsingError(fpname)
                        e.append(lineno, repr(line))
        # if any parsing errors occurred, raise an exception
        if e:
            raise e

        # join the multi-line values collected while reading
        all_sections = [self._defaults]
        all_sections.extend(self._sections.values())
        for options in all_sections:
            for name, val in options.items():
                if isinstance(val, list):
                    options[name] = '\n'.join(val)
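A rough usage sketch (Python 2, not part of the original file) tying the pieces above together: _read() fills _sections, and the typed getters convert values on the way out:

    from StringIO import StringIO
    cp = RawConfigParser()
    cp.readfp(StringIO("[server]\nport = 8080\ndebug = yes\n"))
    print cp.getint("server", "port")        # 8080
    print cp.getboolean("server", "debug")   # True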

import UserDict as _UserDict

class _Chainmap(_UserDict.DictMixin):
    """Combine multiple mappings for successive lookups.

    For example, to emulate Python's normal lookup sequence:

        import __builtin__
        pylookup = _Chainmap(locals(), globals(), vars(__builtin__))
    """

    def __init__(self, *maps):
        self._maps = maps

    def __getitem__(self, key):
        for mapping in self._maps:
            try:
                return mapping[key]
            except KeyError:
                pass
        raise KeyError(key)

    def keys(self):
        result = []
        seen = set()
        for mapping in self._maps:
            for key in mapping:
                if key not in seen:
                    result.append(key)
                    seen.add(key)
        return result
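A small sketch (not from the original file) of the successive-lookup behaviour the docstring describes: earlier mappings shadow later ones:

    overrides = {'colour': 'red'}
    defaults = {'colour': 'blue', 'size': 'M'}
    cm = _Chainmap(overrides, defaults)
    assert cm['colour'] == 'red'     # found in the first mapping
    assert cm['size'] == 'M'         # falls through to the second
    assert sorted(cm.keys()) == ['colour', 'size']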

class ConfigParser(RawConfigParser):

    def get(self, section, option, raw=False, vars=None):
        """Get an option value for a given section.

        If `vars' is provided, it must be a dictionary.  The option is looked up
        in `vars' (if provided), `section', and in `defaults' in that order.

        All % interpolations are expanded in the return values, unless the
        optional argument `raw' is true.  Values for interpolation keys are
        looked up in the same manner as the option.

        The section DEFAULT is special.
        """
        sectiondict = {}
        try:
            sectiondict = self._sections[section]
        except KeyError:
            if section != DEFAULTSECT:
                raise NoSectionError(section)
        # Update with the entry specific variables
        vardict = {}
        if vars:
            for key, value in vars.items():
                vardict[self.optionxform(key)] = value
        d = _Chainmap(vardict, sectiondict, self._defaults)
        option = self.optionxform(option)
        try:
            value = d[option]
        except KeyError:
            raise NoOptionError(option, section)

        if raw or value is None:
            return value
        else:
            return self._interpolate(section, option, value, d)

    def items(self, section, raw=False, vars=None):
        """Return a list of tuples with (name, value) for each option
        in the section.

        All % interpolations are expanded in the return values, based on the
        defaults passed into the constructor, unless the optional argument
        `raw' is true.  Additional substitutions may be provided using the
        `vars' argument, which must be a dictionary whose contents override
        any pre-existing defaults.

        The section DEFAULT is special.
        """
        d = self._defaults.copy()
        try:
            d.update(self._sections[section])
        except KeyError:
            if section != DEFAULTSECT:
                raise NoSectionError(section)
        # Update with the entry specific variables
        if vars:
            for key, value in vars.items():
                d[self.optionxform(key)] = value
        options = d.keys()
        if "__name__" in options:
            options.remove("__name__")
        if raw:
            return [(option, d[option])
                    for option in options]
        else:
            return [(option, self._interpolate(section, option, d[option], d))
                    for option in options]

    def _interpolate(self, section, option, rawval, vars):
        # do the string interpolation
        value = rawval
        depth = MAX_INTERPOLATION_DEPTH
        while depth:                    # Loop through this until it's done
            depth -= 1
            if value and "%(" in value:
                value = self._KEYCRE.sub(self._interpolation_replace, value)
                try:
                    value = value % vars
                except KeyError, e:
                    raise InterpolationMissingOptionError(
                        option, section, rawval, e.args[0])
            else:
                break
        if value and "%(" in value:
            raise InterpolationDepthError(option, section, rawval)
        return value

    _KEYCRE = re.compile(r"%\(([^)]*)\)s|.")

    def _interpolation_replace(self, match):
        s = match.group(1)
        if s is None:
            return match.group()
        else:
            return "%%(%s)s" % self.optionxform(s)


class SafeConfigParser(ConfigParser):

    def _interpolate(self, section, option, rawval, vars):
        # do the string interpolation
        L = []
        self._interpolate_some(option, L, rawval, section, vars, 1)
        return ''.join(L)

    _interpvar_re = re.compile(r"%\(([^)]+)\)s")

    def _interpolate_some(self, option, accum, rest, section, map, depth):
        if depth > MAX_INTERPOLATION_DEPTH:
            raise InterpolationDepthError(option, section, rest)
        while rest:
            p = rest.find("%")
            if p < 0:
                accum.append(rest)
                return
            if p > 0:
                accum.append(rest[:p])
                rest = rest[p:]
            # p is no longer used
            c = rest[1:2]
            if c == "%":
                accum.append("%")
                rest = rest[2:]
            elif c == "(":
                m = self._interpvar_re.match(rest)
                if m is None:
                    raise InterpolationSyntaxError(option, section,
                        "bad interpolation variable reference %r" % rest)
                var = self.optionxform(m.group(1))
                rest = rest[m.end():]
                try:
                    v = map[var]
                except KeyError:
                    raise InterpolationMissingOptionError(
                        option, section, rest, var)
                if "%" in v:
                    self._interpolate_some(option, accum, v,
                                           section, map, depth + 1)
                else:
                    accum.append(v)
            else:
                raise InterpolationSyntaxError(
                    option, section,
                    "'%%' must be followed by '%%' or '(', found: %r" % (rest,))

    def set(self, section, option, value=None):
        """Set an option.  Extend ConfigParser.set: check for string values."""
        # The only legal non-string value if we allow valueless
        # options is None, so we need to check if the value is a
        # string if:
        # - we do not allow valueless options, or
        # - we allow valueless options but the value is not None
        if self._optcre is self.OPTCRE or value:
            if not isinstance(value, basestring):
                raise TypeError("option values must be strings")
        if value is not None:
            # check for bad percent signs:
            # first, replace all "good" interpolations
            tmp_value = value.replace('%%', '')
            tmp_value = self._interpvar_re.sub('', tmp_value)
            # then, check if there's a lone percent sign left
            if '%' in tmp_value:
                raise ValueError("invalid interpolation syntax in %r at "
                                 "position %d" % (value, tmp_value.find('%')))
        ConfigParser.set(self, section, option, value)
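An illustrative sketch of the percent-sign validation above: escaped '%%' and well-formed %(...)s references pass, a lone '%' does not:

    scp = SafeConfigParser()
    scp.add_section('s')
    scp.set('s', 'ok', '100%% pure %(thing)s')   # accepted
    try:
        scp.set('s', 'bad', '100% pure')         # lone '%' is rejected
    except ValueError:
        pass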
@ -1,279 +0,0 @@
"""Self documenting XML-RPC Server.

This module can be used to create XML-RPC servers that
serve pydoc-style documentation in response to HTTP
GET requests.  This documentation is dynamically generated
based on the functions and methods registered with the
server.

This module is built upon the pydoc and SimpleXMLRPCServer
modules.
"""

import pydoc
import inspect
import re
import sys

from SimpleXMLRPCServer import (SimpleXMLRPCServer,
            SimpleXMLRPCRequestHandler,
            CGIXMLRPCRequestHandler,
            resolve_dotted_attribute)

class ServerHTMLDoc(pydoc.HTMLDoc):
    """Class used to generate pydoc HTML document for a server"""

    def markup(self, text, escape=None, funcs={}, classes={}, methods={}):
        """Mark up some plain text, given a context of symbols to look for.
        Each context dictionary maps object names to anchor names."""
        escape = escape or self.escape
        results = []
        here = 0

        # XXX Note that this regular expression does not allow for the
        # hyperlinking of arbitrary strings being used as method
        # names. Only methods with names consisting of word characters
        # and '.'s are hyperlinked.
        pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|'
                                r'RFC[- ]?(\d+)|'
                                r'PEP[- ]?(\d+)|'
                                r'(self\.)?((?:\w|\.)+))\b')
        while 1:
            match = pattern.search(text, here)
            if not match: break
            start, end = match.span()
            results.append(escape(text[here:start]))

            all, scheme, rfc, pep, selfdot, name = match.groups()
            if scheme:
                url = escape(all).replace('"', '&quot;')
                results.append('<a href="%s">%s</a>' % (url, url))
            elif rfc:
                url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc)
                results.append('<a href="%s">%s</a>' % (url, escape(all)))
            elif pep:
                url = 'http://www.python.org/dev/peps/pep-%04d/' % int(pep)
                results.append('<a href="%s">%s</a>' % (url, escape(all)))
            elif text[end:end+1] == '(':
                results.append(self.namelink(name, methods, funcs, classes))
            elif selfdot:
                results.append('self.<strong>%s</strong>' % name)
            else:
                results.append(self.namelink(name, classes))
            here = end
        results.append(escape(text[here:]))
        return ''.join(results)

    def docroutine(self, object, name, mod=None,
                   funcs={}, classes={}, methods={}, cl=None):
        """Produce HTML documentation for a function or method object."""

        anchor = (cl and cl.__name__ or '') + '-' + name
        note = ''

        title = '<a name="%s"><strong>%s</strong></a>' % (
            self.escape(anchor), self.escape(name))

        if inspect.ismethod(object):
            args, varargs, varkw, defaults = inspect.getargspec(object.im_func)
            # exclude the argument bound to the instance, it will be
            # confusing to the non-Python user
            argspec = inspect.formatargspec (
                    args[1:],
                    varargs,
                    varkw,
                    defaults,
                    formatvalue=self.formatvalue
                )
        elif inspect.isfunction(object):
            args, varargs, varkw, defaults = inspect.getargspec(object)
            argspec = inspect.formatargspec(
                args, varargs, varkw, defaults, formatvalue=self.formatvalue)
        else:
            argspec = '(...)'

        if isinstance(object, tuple):
            argspec = object[0] or argspec
            docstring = object[1] or ""
        else:
            docstring = pydoc.getdoc(object)

        decl = title + argspec + (note and self.grey(
            '<font face="helvetica, arial">%s</font>' % note))

        doc = self.markup(
            docstring, self.preformat, funcs, classes, methods)
        doc = doc and '<dd><tt>%s</tt></dd>' % doc
        return '<dl><dt>%s</dt>%s</dl>\n' % (decl, doc)

    def docserver(self, server_name, package_documentation, methods):
        """Produce HTML documentation for an XML-RPC server."""

        fdict = {}
        for key, value in methods.items():
            fdict[key] = '#-' + key
            fdict[value] = fdict[key]

        server_name = self.escape(server_name)
        head = '<big><big><strong>%s</strong></big></big>' % server_name
        result = self.heading(head, '#ffffff', '#7799ee')

        doc = self.markup(package_documentation, self.preformat, fdict)
        doc = doc and '<tt>%s</tt>' % doc
        result = result + '<p>%s</p>\n' % doc

        contents = []
        method_items = sorted(methods.items())
        for key, value in method_items:
            contents.append(self.docroutine(value, key, funcs=fdict))
        result = result + self.bigsection(
            'Methods', '#ffffff', '#eeaa77', pydoc.join(contents))

        return result

class XMLRPCDocGenerator:
    """Generates documentation for an XML-RPC server.

    This class is designed as mix-in and should not
    be constructed directly.
    """

    def __init__(self):
        # setup variables used for HTML documentation
        self.server_name = 'XML-RPC Server Documentation'
        self.server_documentation = \
            "This server exports the following methods through the XML-RPC "\
            "protocol."
        self.server_title = 'XML-RPC Server Documentation'

    def set_server_title(self, server_title):
        """Set the HTML title of the generated server documentation"""

        self.server_title = server_title

    def set_server_name(self, server_name):
        """Set the name of the generated HTML server documentation"""

        self.server_name = server_name

    def set_server_documentation(self, server_documentation):
        """Set the documentation string for the entire server."""

        self.server_documentation = server_documentation

    def generate_html_documentation(self):
        """generate_html_documentation() => html documentation for the server

        Generates HTML documentation for the server using introspection for
        installed functions and instances that do not implement the
        _dispatch method.  Alternatively, instances can choose to implement
        the _get_method_argstring(method_name) method to provide the
        argument string used in the documentation and the
        _methodHelp(method_name) method to provide the help text used
        in the documentation."""

        methods = {}

        for method_name in self.system_listMethods():
            if method_name in self.funcs:
                method = self.funcs[method_name]
            elif self.instance is not None:
                method_info = [None, None]  # argspec, documentation
                if hasattr(self.instance, '_get_method_argstring'):
                    method_info[0] = self.instance._get_method_argstring(method_name)
                if hasattr(self.instance, '_methodHelp'):
                    method_info[1] = self.instance._methodHelp(method_name)

                method_info = tuple(method_info)
                if method_info != (None, None):
                    method = method_info
                elif not hasattr(self.instance, '_dispatch'):
                    try:
                        method = resolve_dotted_attribute(
                                    self.instance,
                                    method_name
                                    )
                    except AttributeError:
                        method = method_info
                else:
                    method = method_info
            else:
                assert 0, "Could not find method in self.functions and no "\
                          "instance installed"

            methods[method_name] = method

        documenter = ServerHTMLDoc()
        documentation = documenter.docserver(
                                self.server_name,
                                self.server_documentation,
                                methods
                            )

        return documenter.page(self.server_title, documentation)

class DocXMLRPCRequestHandler(SimpleXMLRPCRequestHandler):
    """XML-RPC and documentation request handler class.

    Handles all HTTP POST requests and attempts to decode them as
    XML-RPC requests.

    Handles all HTTP GET requests and interprets them as requests
    for documentation.
    """

    def do_GET(self):
        """Handles the HTTP GET request.

        Interpret all HTTP GET requests as requests for server
        documentation.
        """
        # Check that the path is legal
        if not self.is_rpc_path_valid():
            self.report_404()
            return

        response = self.server.generate_html_documentation()
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.send_header("Content-length", str(len(response)))
        self.end_headers()
        self.wfile.write(response)

class DocXMLRPCServer(SimpleXMLRPCServer,
                      XMLRPCDocGenerator):
    """XML-RPC and HTML documentation server.

    Adds the ability to serve server documentation to the capabilities
    of SimpleXMLRPCServer.
    """

    def __init__(self, addr, requestHandler=DocXMLRPCRequestHandler,
                 logRequests=1, allow_none=False, encoding=None,
                 bind_and_activate=True):
        SimpleXMLRPCServer.__init__(self, addr, requestHandler, logRequests,
                                    allow_none, encoding, bind_and_activate)
        XMLRPCDocGenerator.__init__(self)
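A minimal usage sketch (address, title, and registered function are illustrative, not from the original file): POSTs to the server are handled as XML-RPC calls, while a GET returns the generated pydoc-style page:

    server = DocXMLRPCServer(('localhost', 8000), logRequests=0)
    server.set_server_title('Demo service')
    server.register_function(pow)
    server.serve_forever()    # blocks; GET / now serves the HTML docs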

class DocCGIXMLRPCRequestHandler(CGIXMLRPCRequestHandler,
                                 XMLRPCDocGenerator):
    """Handler for XML-RPC data and documentation requests passed through
    CGI"""

    def handle_get(self):
        """Handles the HTTP GET request.

        Interpret all HTTP GET requests as requests for server
        documentation.
        """

        response = self.generate_html_documentation()

        print 'Content-Type: text/html'
        print 'Content-Length: %d' % len(response)
        print
        sys.stdout.write(response)

    def __init__(self):
        CGIXMLRPCRequestHandler.__init__(self)
        XMLRPCDocGenerator.__init__(self)
@ -1,186 +0,0 @@
"""Generic MIME writer.

This module defines the class MimeWriter.  The MimeWriter class implements
a basic formatter for creating MIME multi-part files.  It doesn't seek around
the output file nor does it use large amounts of buffer space.  You must write
the parts out in the order that they should occur in the final file.
MimeWriter does buffer the headers you add, allowing you to rearrange their
order.

"""


import mimetools

__all__ = ["MimeWriter"]

import warnings

warnings.warn("the MimeWriter module is deprecated; use the email package instead",
              DeprecationWarning, 2)

class MimeWriter:

    """Generic MIME writer.

    Methods:

    __init__()
    addheader()
    flushheaders()
    startbody()
    startmultipartbody()
    nextpart()
    lastpart()

    A MIME writer is much more primitive than a MIME parser.  It
    doesn't seek around on the output file, and it doesn't use large
    amounts of buffer space, so you have to write the parts in the
    order they should occur on the output file.  It does buffer the
    headers you add, allowing you to rearrange their order.

    General usage is:

    f = <open the output file>
    w = MimeWriter(f)
    ...call w.addheader(key, value) 0 or more times...

    followed by either:

    f = w.startbody(content_type)
    ...call f.write(data) for body data...

    or:

    w.startmultipartbody(subtype)
    for each part:
        subwriter = w.nextpart()
        ...use the subwriter's methods to create the subpart...
    w.lastpart()

    The subwriter is another MimeWriter instance, and should be
    treated in the same way as the toplevel MimeWriter.  This way,
    writing recursive body parts is easy.

    Warning: don't forget to call lastpart()!

    XXX There should be more state so calls made in the wrong order
    are detected.

    Some special cases:

    - startbody() just returns the file passed to the constructor;
      but don't use this knowledge, as it may be changed.

    - startmultipartbody() actually returns a file as well;
      this can be used to write the initial 'if you can read this your
      mailer is not MIME-aware' message.

    - If you call flushheaders(), the headers accumulated so far are
      written out (and forgotten); this is useful if you don't need a
      body part at all, e.g. for a subpart of type message/rfc822
      that's (mis)used to store some header-like information.

    - Passing a keyword argument 'prefix=<flag>' to addheader(),
      start*body() affects where the header is inserted; 0 means
      append at the end, 1 means insert at the start; default is
      append for addheader(), but insert for start*body(), which use
      it to determine where the Content-Type header goes.

    """

    def __init__(self, fp):
        self._fp = fp
        self._headers = []

    def addheader(self, key, value, prefix=0):
        """Add a header line to the MIME message.

        The key is the name of the header, where the value obviously provides
        the value of the header.  The optional argument prefix determines
        where the header is inserted; 0 means append at the end, 1 means
        insert at the start.  The default is to append.

        """
        lines = value.split("\n")
        while lines and not lines[-1]: del lines[-1]
        while lines and not lines[0]: del lines[0]
        for i in range(1, len(lines)):
            lines[i] = "    " + lines[i].strip()
        value = "\n".join(lines) + "\n"
        line = key + ": " + value
        if prefix:
            self._headers.insert(0, line)
        else:
            self._headers.append(line)

    def flushheaders(self):
        """Writes out and forgets all headers accumulated so far.

        This is useful if you don't need a body part at all; for example,
        for a subpart of type message/rfc822 that's (mis)used to store some
        header-like information.

        """
        self._fp.writelines(self._headers)
        self._headers = []

    def startbody(self, ctype, plist=[], prefix=1):
        """Returns a file-like object for writing the body of the message.

        The content-type is set to the provided ctype, and the optional
        parameter, plist, provides additional parameters for the
        content-type declaration.  The optional argument prefix determines
        where the header is inserted; 0 means append at the end, 1 means
        insert at the start.  The default is to insert at the start.

        """
        for name, value in plist:
            ctype = ctype + ';\n %s=\"%s\"' % (name, value)
        self.addheader("Content-Type", ctype, prefix=prefix)
        self.flushheaders()
        self._fp.write("\n")
        return self._fp

    def startmultipartbody(self, subtype, boundary=None, plist=[], prefix=1):
        """Returns a file-like object for writing the body of the message.

        Additionally, this method initializes the multi-part code, where the
        subtype parameter provides the multipart subtype, the boundary
        parameter may provide a user-defined boundary specification, and the
        plist parameter provides optional parameters for the subtype.  The
        optional argument, prefix, determines where the header is inserted;
        0 means append at the end, 1 means insert at the start.  The default
        is to insert at the start.  Subparts should be created using the
        nextpart() method.

        """
        self._boundary = boundary or mimetools.choose_boundary()
        return self.startbody("multipart/" + subtype,
                              [("boundary", self._boundary)] + plist,
                              prefix=prefix)

    def nextpart(self):
        """Returns a new instance of MimeWriter which represents an
        individual part in a multipart message.

        This may be used to write the part as well as used for creating
        recursively complex multipart messages.  The message must first be
        initialized with the startmultipartbody() method before using the
        nextpart() method.

        """
        self._fp.write("\n--" + self._boundary + "\n")
        return self.__class__(self._fp)

    def lastpart(self):
        """This is used to designate the last part of a multipart message.

        It should always be used when writing multipart messages.

        """
        self._fp.write("\n--" + self._boundary + "--\n")


if __name__ == '__main__':
    import test.test_MimeWriter
@ -1,224 +0,0 @@
"""Simple HTTP Server.

This module builds on BaseHTTPServer by implementing the standard GET
and HEAD requests in a fairly straightforward manner.

"""


__version__ = "0.6"

__all__ = ["SimpleHTTPRequestHandler"]

import os
import posixpath
import BaseHTTPServer
import urllib
import cgi
import sys
import shutil
import mimetypes
try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO


class SimpleHTTPRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):

    """Simple HTTP request handler with GET and HEAD commands.

    This serves files from the current directory and any of its
    subdirectories.  The MIME type for files is determined by
    calling the .guess_type() method.

    The GET and HEAD requests are identical except that the HEAD
    request omits the actual contents of the file.

    """

    server_version = "SimpleHTTP/" + __version__

    def do_GET(self):
        """Serve a GET request."""
        f = self.send_head()
        if f:
            self.copyfile(f, self.wfile)
            f.close()

    def do_HEAD(self):
        """Serve a HEAD request."""
        f = self.send_head()
        if f:
            f.close()

    def send_head(self):
        """Common code for GET and HEAD commands.

        This sends the response code and MIME headers.

        Return value is either a file object (which has to be copied
        to the outputfile by the caller unless the command was HEAD,
        and must be closed by the caller under all circumstances), or
        None, in which case the caller has nothing further to do.

        """
        path = self.translate_path(self.path)
        f = None
        if os.path.isdir(path):
            if not self.path.endswith('/'):
                # redirect browser - doing basically what apache does
                self.send_response(301)
                self.send_header("Location", self.path + "/")
                self.end_headers()
                return None
            for index in "index.html", "index.htm":
                index = os.path.join(path, index)
                if os.path.exists(index):
                    path = index
                    break
            else:
                return self.list_directory(path)
        ctype = self.guess_type(path)
        try:
            # Always read in binary mode. Opening files in text mode may cause
            # newline translations, making the actual size of the content
            # transmitted *less* than the content-length!
            f = open(path, 'rb')
        except IOError:
            self.send_error(404, "File not found")
            return None
        self.send_response(200)
        self.send_header("Content-type", ctype)
        fs = os.fstat(f.fileno())
        self.send_header("Content-Length", str(fs[6]))
        self.send_header("Last-Modified", self.date_time_string(fs.st_mtime))
        self.end_headers()
        return f

    def list_directory(self, path):
        """Helper to produce a directory listing (absent index.html).

        Return value is either a file object, or None (indicating an
        error).  In either case, the headers are sent, making the
        interface the same as for send_head().

        """
        try:
            list = os.listdir(path)
        except os.error:
            self.send_error(404, "No permission to list directory")
            return None
        list.sort(key=lambda a: a.lower())
        f = StringIO()
        displaypath = cgi.escape(urllib.unquote(self.path))
        f.write('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">')
        f.write("<html>\n<title>Directory listing for %s</title>\n" % displaypath)
        f.write("<body>\n<h2>Directory listing for %s</h2>\n" % displaypath)
        f.write("<hr>\n<ul>\n")
        for name in list:
            fullname = os.path.join(path, name)
            displayname = linkname = name
            # Append / for directories or @ for symbolic links
            if os.path.isdir(fullname):
                displayname = name + "/"
                linkname = name + "/"
            if os.path.islink(fullname):
                displayname = name + "@"
                # Note: a link to a directory displays with @ and links with /
            f.write('<li><a href="%s">%s</a>\n'
                    % (urllib.quote(linkname), cgi.escape(displayname)))
        f.write("</ul>\n<hr>\n</body>\n</html>\n")
        length = f.tell()
        f.seek(0)
        self.send_response(200)
        encoding = sys.getfilesystemencoding()
        self.send_header("Content-type", "text/html; charset=%s" % encoding)
        self.send_header("Content-Length", str(length))
        self.end_headers()
        return f

    def translate_path(self, path):
        """Translate a /-separated PATH to the local filename syntax.

        Components that mean special things to the local file system
        (e.g. drive or directory names) are ignored.  (XXX They should
        probably be diagnosed.)

        """
        # abandon query parameters
        path = path.split('?',1)[0]
        path = path.split('#',1)[0]
        # Don't forget explicit trailing slash when normalizing. Issue17324
        trailing_slash = path.rstrip().endswith('/')
        path = posixpath.normpath(urllib.unquote(path))
        words = path.split('/')
        words = filter(None, words)
        path = os.getcwd()
        for word in words:
            drive, word = os.path.splitdrive(word)
            head, word = os.path.split(word)
            if word in (os.curdir, os.pardir): continue
            path = os.path.join(path, word)
        if trailing_slash:
            path += '/'
        return path
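A hedged sketch of translate_path's effect; since the method never touches self, it can be exercised without a live request object (Python 2):

    p = SimpleHTTPRequestHandler.translate_path.im_func(None, '/a%20b/../docs/?q=1')
    # query dropped, %20 unquoted, '..' normalised away:
    # p == os.path.join(os.getcwd(), 'docs') + '/'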

    def copyfile(self, source, outputfile):
        """Copy all data between two file objects.

        The SOURCE argument is a file object open for reading
        (or anything with a read() method) and the DESTINATION
        argument is a file object open for writing (or
        anything with a write() method).

        The only reason for overriding this would be to change
        the block size or perhaps to replace newlines by CRLF
        -- note however that the default server uses this
        to copy binary data as well.

        """
        shutil.copyfileobj(source, outputfile)

    def guess_type(self, path):
        """Guess the type of a file.

        Argument is a PATH (a filename).

        Return value is a string of the form type/subtype,
        usable for a MIME Content-type header.

        The default implementation looks the file's extension
        up in the table self.extensions_map, using application/octet-stream
        as a default; however it would be permissible (if
        slow) to look inside the data to make a better guess.

        """

        base, ext = posixpath.splitext(path)
        if ext in self.extensions_map:
            return self.extensions_map[ext]
        ext = ext.lower()
        if ext in self.extensions_map:
            return self.extensions_map[ext]
        else:
            return self.extensions_map['']

    if not mimetypes.inited:
        mimetypes.init()  # try to read system mime.types
    extensions_map = mimetypes.types_map.copy()
    extensions_map.update({
        '': 'application/octet-stream',  # Default
        '.py': 'text/plain',
        '.c': 'text/plain',
        '.h': 'text/plain',
        })


def test(HandlerClass = SimpleHTTPRequestHandler,
         ServerClass = BaseHTTPServer.HTTPServer):
    BaseHTTPServer.test(HandlerClass, ServerClass)


if __name__ == '__main__':
    test()
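Outside this test hook, the usual way to use the module under Python 2 is from a shell, serving the current directory:

    python -m SimpleHTTPServer 8000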
@ -1,324 +0,0 @@
r"""File-like objects that read from or write to a string buffer.

This implements (nearly) all stdio methods.

f = StringIO()      # ready for writing
f = StringIO(buf)   # ready for reading
f.close()           # explicitly release resources held
flag = f.isatty()   # always false
pos = f.tell()      # get current position
f.seek(pos)         # set current position
f.seek(pos, mode)   # mode 0: absolute; 1: relative; 2: relative to EOF
buf = f.read()      # read until EOF
buf = f.read(n)     # read up to n bytes
buf = f.readline()  # read until end of line ('\n') or EOF
list = f.readlines()# list of f.readline() results until EOF
f.truncate([size])  # truncate file to at most size (default: current pos)
f.write(buf)        # write at current position
f.writelines(list)  # for line in list: f.write(line)
f.getvalue()        # return whole file's contents as a string

Notes:
- Using a real file is often faster (but less convenient).
- There's also a much faster implementation in C, called cStringIO, but
  it's not subclassable.
- fileno() is left unimplemented so that code which uses it triggers
  an exception early.
- Seeking far beyond EOF and then writing will insert real null
  bytes that occupy space in the buffer.
- There's a simple test set (see end of this file).
"""
try:
    from errno import EINVAL
except ImportError:
    EINVAL = 22

__all__ = ["StringIO"]

def _complain_ifclosed(closed):
    if closed:
        raise ValueError, "I/O operation on closed file"

class StringIO:
    """class StringIO([buffer])

    When a StringIO object is created, it can be initialized to an existing
    string by passing the string to the constructor.  If no string is given,
    the StringIO will start empty.

    The StringIO object can accept either Unicode or 8-bit strings, but
    mixing the two may take some care.  If both are used, 8-bit strings that
    cannot be interpreted as 7-bit ASCII (that use the 8th bit) will cause
    a UnicodeError to be raised when getvalue() is called.
    """
    def __init__(self, buf = ''):
        # Force self.buf to be a string or unicode
        if not isinstance(buf, basestring):
            buf = str(buf)
        self.buf = buf
        self.len = len(buf)
        self.buflist = []
        self.pos = 0
        self.closed = False
        self.softspace = 0

    def __iter__(self):
        return self

    def next(self):
        """A file object is its own iterator, for example iter(f) returns f
        (unless f is closed).  When a file is used as an iterator, typically
        in a for loop (for example, for line in f: print line), the next()
        method is called repeatedly.  This method returns the next input line,
        or raises StopIteration when EOF is hit.
        """
        _complain_ifclosed(self.closed)
        r = self.readline()
        if not r:
            raise StopIteration
        return r

    def close(self):
        """Free the memory buffer.
        """
        if not self.closed:
            self.closed = True
            del self.buf, self.pos

    def isatty(self):
        """Returns False because StringIO objects are not connected to a
        tty-like device.
        """
        _complain_ifclosed(self.closed)
        return False

    def seek(self, pos, mode = 0):
        """Set the file's current position.

        The mode argument is optional and defaults to 0 (absolute file
        positioning); other values are 1 (seek relative to the current
        position) and 2 (seek relative to the file's end).

        There is no return value.
        """
        _complain_ifclosed(self.closed)
        if self.buflist:
            self.buf += ''.join(self.buflist)
            self.buflist = []
        if mode == 1:
            pos += self.pos
        elif mode == 2:
            pos += self.len
        self.pos = max(0, pos)

    def tell(self):
        """Return the file's current position."""
        _complain_ifclosed(self.closed)
        return self.pos

    def read(self, n = -1):
        """Read at most size bytes from the file
        (less if the read hits EOF before obtaining size bytes).

        If the size argument is negative or omitted, read all data until EOF
        is reached.  The bytes are returned as a string object.  An empty
        string is returned when EOF is encountered immediately.
        """
        _complain_ifclosed(self.closed)
        if self.buflist:
            self.buf += ''.join(self.buflist)
            self.buflist = []
        if n is None or n < 0:
            newpos = self.len
        else:
            newpos = min(self.pos+n, self.len)
        r = self.buf[self.pos:newpos]
        self.pos = newpos
        return r

    def readline(self, length=None):
        r"""Read one entire line from the file.

        A trailing newline character is kept in the string (but may be absent
        when a file ends with an incomplete line).  If the size argument is
        present and non-negative, it is a maximum byte count (including the
        trailing newline) and an incomplete line may be returned.

        An empty string is returned only when EOF is encountered immediately.

        Note: Unlike stdio's fgets(), the returned string contains null
        characters ('\0') if they occurred in the input.
        """
        _complain_ifclosed(self.closed)
        if self.buflist:
            self.buf += ''.join(self.buflist)
            self.buflist = []
        i = self.buf.find('\n', self.pos)
        if i < 0:
            newpos = self.len
        else:
            newpos = i+1
        if length is not None and length >= 0:
            if self.pos + length < newpos:
                newpos = self.pos + length
        r = self.buf[self.pos:newpos]
        self.pos = newpos
        return r

    def readlines(self, sizehint = 0):
        """Read until EOF using readline() and return a list containing the
        lines thus read.

        If the optional sizehint argument is present, instead of reading up
        to EOF, whole lines totalling approximately sizehint bytes (or more
        to accommodate a final whole line) are read.
        """
        total = 0
        lines = []
        line = self.readline()
        while line:
            lines.append(line)
            total += len(line)
            if 0 < sizehint <= total:
                break
            line = self.readline()
        return lines

    def truncate(self, size=None):
        """Truncate the file's size.

        If the optional size argument is present, the file is truncated to
        (at most) that size.  The size defaults to the current position.
        The current file position is not changed unless the position
        is beyond the new file size.

        If the specified size exceeds the file's current size, the
        file remains unchanged.
        """
        _complain_ifclosed(self.closed)
        if size is None:
            size = self.pos
        elif size < 0:
            raise IOError(EINVAL, "Negative size not allowed")
        elif size < self.pos:
            self.pos = size
        self.buf = self.getvalue()[:size]
        self.len = size

    def write(self, s):
        """Write a string to the file.

        There is no return value.
        """
        _complain_ifclosed(self.closed)
        if not s: return
        # Force s to be a string or unicode
        if not isinstance(s, basestring):
            s = str(s)
        spos = self.pos
        slen = self.len
        if spos == slen:
            self.buflist.append(s)
            self.len = self.pos = spos + len(s)
            return
        if spos > slen:
            self.buflist.append('\0'*(spos - slen))
            slen = spos
        newpos = spos + len(s)
        if spos < slen:
            if self.buflist:
                self.buf += ''.join(self.buflist)
            self.buflist = [self.buf[:spos], s, self.buf[newpos:]]
            self.buf = ''
            if newpos > slen:
                slen = newpos
        else:
            self.buflist.append(s)
            slen = newpos
        self.len = slen
        self.pos = newpos

    def writelines(self, iterable):
        """Write a sequence of strings to the file.  The sequence can be any
        iterable object producing strings, typically a list of strings.  There
        is no return value.

        (The name is intended to match readlines(); writelines() does not add
        line separators.)
        """
        write = self.write
        for line in iterable:
            write(line)

    def flush(self):
        """Flush the internal buffer
        """
        _complain_ifclosed(self.closed)

    def getvalue(self):
        """
        Retrieve the entire contents of the "file" at any time before
        the StringIO object's close() method is called.

        The StringIO object can accept either Unicode or 8-bit strings,
        but mixing the two may take some care.  If both are used, 8-bit
        strings that cannot be interpreted as 7-bit ASCII (that use the
        8th bit) will cause a UnicodeError to be raised when getvalue()
        is called.
        """
        _complain_ifclosed(self.closed)
        if self.buflist:
            self.buf += ''.join(self.buflist)
            self.buflist = []
        return self.buf
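A short sketch (not part of the original file) of the read/write round trip the module docstring lists:

    f = StringIO()
    f.write('alpha\n')
    f.writelines(['beta\n', 'gamma\n'])
    f.seek(0)
    assert f.readline() == 'alpha\n'
    assert f.getvalue() == 'alpha\nbeta\ngamma\n'
    f.close()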


# A little test suite

def test():
    import sys
    if sys.argv[1:]:
        file = sys.argv[1]
    else:
        file = '/etc/passwd'
    lines = open(file, 'r').readlines()
    text = open(file, 'r').read()
    f = StringIO()
    for line in lines[:-2]:
        f.write(line)
    f.writelines(lines[-2:])
    if f.getvalue() != text:
        raise RuntimeError, 'write failed'
    length = f.tell()
    print 'File length =', length
    f.seek(len(lines[0]))
    f.write(lines[1])
    f.seek(0)
    print 'First line =', repr(f.readline())
    print 'Position =', f.tell()
    line = f.readline()
    print 'Second line =', repr(line)
    f.seek(-len(line), 1)
    line2 = f.read(len(line))
    if line != line2:
        raise RuntimeError, 'bad result after seek back'
    f.seek(len(line2), 1)
    list = f.readlines()
    line = list[-1]
    f.seek(f.tell() - len(line))
    line2 = f.read()
    if line != line2:
        raise RuntimeError, 'bad result after seek back from EOF'
    print 'Read', len(list), 'more lines'
    print 'File length =', f.tell()
    if f.tell() != length:
        raise RuntimeError, 'bad length'
    f.truncate(length/2)
    f.seek(0, 2)
    print 'Truncated length =', f.tell()
    if f.tell() != length/2:
        raise RuntimeError, 'truncate did not adjust length'
    f.close()

if __name__ == '__main__':
    test()
@ -1,180 +0,0 @@
"""A more or less complete user-defined wrapper around dictionary objects."""

class UserDict:
    def __init__(self, dict=None, **kwargs):
        self.data = {}
        if dict is not None:
            self.update(dict)
        if len(kwargs):
            self.update(kwargs)
    def __repr__(self): return repr(self.data)
    def __cmp__(self, dict):
        if isinstance(dict, UserDict):
            return cmp(self.data, dict.data)
        else:
            return cmp(self.data, dict)
    __hash__ = None  # Avoid Py3k warning
    def __len__(self): return len(self.data)
    def __getitem__(self, key):
        if key in self.data:
            return self.data[key]
        if hasattr(self.__class__, "__missing__"):
            return self.__class__.__missing__(self, key)
        raise KeyError(key)
    def __setitem__(self, key, item): self.data[key] = item
    def __delitem__(self, key): del self.data[key]
    def clear(self): self.data.clear()
    def copy(self):
        if self.__class__ is UserDict:
            return UserDict(self.data.copy())
        import copy
        data = self.data
        try:
            self.data = {}
            c = copy.copy(self)
        finally:
            self.data = data
        c.update(self)
        return c
    def keys(self): return self.data.keys()
    def items(self): return self.data.items()
    def iteritems(self): return self.data.iteritems()
    def iterkeys(self): return self.data.iterkeys()
    def itervalues(self): return self.data.itervalues()
    def values(self): return self.data.values()
    def has_key(self, key): return key in self.data
    def update(self, dict=None, **kwargs):
        if dict is None:
            pass
        elif isinstance(dict, UserDict):
            self.data.update(dict.data)
        elif isinstance(dict, type({})) or not hasattr(dict, 'items'):
            self.data.update(dict)
        else:
            for k, v in dict.items():
                self[k] = v
        if len(kwargs):
            self.data.update(kwargs)
    def get(self, key, failobj=None):
        if key not in self:
            return failobj
        return self[key]
    def setdefault(self, key, failobj=None):
        if key not in self:
            self[key] = failobj
        return self[key]
    def pop(self, key, *args):
        return self.data.pop(key, *args)
    def popitem(self):
        return self.data.popitem()
    def __contains__(self, key):
        return key in self.data
    @classmethod
    def fromkeys(cls, iterable, value=None):
        d = cls()
        for key in iterable:
            d[key] = value
        return d

class IterableUserDict(UserDict):
    def __iter__(self):
        return iter(self.data)

import _abcoll
_abcoll.MutableMapping.register(IterableUserDict)


class DictMixin:
    # Mixin defining all dictionary methods for classes that already have
    # a minimum dictionary interface including getitem, setitem, delitem,
    # and keys.  Without knowledge of the subclass constructor, the mixin
    # does not define __init__() or copy().  In addition to the four base
    # methods, progressively more efficiency comes with defining
    # __contains__(), __iter__(), and iteritems().

    # second level definitions support higher levels
    def __iter__(self):
        for k in self.keys():
            yield k
    def has_key(self, key):
        try:
            self[key]
        except KeyError:
            return False
        return True
    def __contains__(self, key):
        return self.has_key(key)

    # third level takes advantage of second level definitions
    def iteritems(self):
        for k in self:
            yield (k, self[k])
    def iterkeys(self):
        return self.__iter__()

    # fourth level uses definitions from lower levels
    def itervalues(self):
        for _, v in self.iteritems():
            yield v
    def values(self):
        return [v for _, v in self.iteritems()]
    def items(self):
        return list(self.iteritems())
    def clear(self):
        for key in self.keys():
            del self[key]
    def setdefault(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            self[key] = default
        return default
    def pop(self, key, *args):
        if len(args) > 1:
            raise TypeError, "pop expected at most 2 arguments, got "\
                              + repr(1 + len(args))
        try:
            value = self[key]
        except KeyError:
            if args:
                return args[0]
            raise
        del self[key]
        return value
    def popitem(self):
        try:
            k, v = self.iteritems().next()
        except StopIteration:
            raise KeyError, 'container is empty'
        del self[k]
        return (k, v)
    def update(self, other=None, **kwargs):
        # Make progressively weaker assumptions about "other"
        if other is None:
            pass
        elif hasattr(other, 'iteritems'):  # iteritems saves memory and lookups
            for k, v in other.iteritems():
                self[k] = v
        elif hasattr(other, 'keys'):
            for k in other.keys():
                self[k] = other[k]
        else:
            for k, v in other:
                self[k] = v
        if kwargs:
            self.update(kwargs)
    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default
    def __repr__(self):
        return repr(dict(self.iteritems()))
    def __cmp__(self, other):
        if other is None:
            return 1
        if isinstance(other, DictMixin):
            other = dict(other.iteritems())
        return cmp(dict(self.iteritems()), other)
    def __len__(self):
        return len(self.keys())
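A sketch (not from the original file) of the minimum interface the comment above names; DictMixin fills in the rest of the mapping protocol:

    class AttrDict(DictMixin):
        # backs the mapping protocol with instance attributes
        def __getitem__(self, key):
            try:
                return getattr(self, key)
            except AttributeError:
                raise KeyError(key)
        def __setitem__(self, key, value):
            setattr(self, key, value)
        def __delitem__(self, key):
            delattr(self, key)
        def keys(self):
            return self.__dict__.keys()

    d = AttrDict()
    d['x'] = 1
    assert 'x' in d and d.get('y', 0) == 0 and d.items() == [('x', 1)]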
@ -1,88 +0,0 @@
"""A more or less complete user-defined wrapper around list objects."""

import collections

class UserList(collections.MutableSequence):
    def __init__(self, initlist=None):
        self.data = []
        if initlist is not None:
            # XXX should this accept an arbitrary sequence?
            if type(initlist) == type(self.data):
                self.data[:] = initlist
            elif isinstance(initlist, UserList):
                self.data[:] = initlist.data[:]
            else:
                self.data = list(initlist)
    def __repr__(self): return repr(self.data)
    def __lt__(self, other): return self.data <  self.__cast(other)
    def __le__(self, other): return self.data <= self.__cast(other)
    def __eq__(self, other): return self.data == self.__cast(other)
    def __ne__(self, other): return self.data != self.__cast(other)
    def __gt__(self, other): return self.data >  self.__cast(other)
    def __ge__(self, other): return self.data >= self.__cast(other)
    def __cast(self, other):
        if isinstance(other, UserList): return other.data
        else: return other
    def __cmp__(self, other):
        return cmp(self.data, self.__cast(other))
    __hash__ = None  # Mutable sequence, so not hashable
    def __contains__(self, item): return item in self.data
    def __len__(self): return len(self.data)
    def __getitem__(self, i): return self.data[i]
    def __setitem__(self, i, item): self.data[i] = item
    def __delitem__(self, i): del self.data[i]
    def __getslice__(self, i, j):
        i = max(i, 0); j = max(j, 0)
        return self.__class__(self.data[i:j])
    def __setslice__(self, i, j, other):
        i = max(i, 0); j = max(j, 0)
        if isinstance(other, UserList):
            self.data[i:j] = other.data
        elif isinstance(other, type(self.data)):
            self.data[i:j] = other
        else:
            self.data[i:j] = list(other)
    def __delslice__(self, i, j):
        i = max(i, 0); j = max(j, 0)
        del self.data[i:j]
    def __add__(self, other):
        if isinstance(other, UserList):
            return self.__class__(self.data + other.data)
        elif isinstance(other, type(self.data)):
            return self.__class__(self.data + other)
        else:
            return self.__class__(self.data + list(other))
    def __radd__(self, other):
        if isinstance(other, UserList):
            return self.__class__(other.data + self.data)
        elif isinstance(other, type(self.data)):
            return self.__class__(other + self.data)
        else:
            return self.__class__(list(other) + self.data)
    def __iadd__(self, other):
        if isinstance(other, UserList):
            self.data += other.data
        elif isinstance(other, type(self.data)):
            self.data += other
        else:
            self.data += list(other)
        return self
    def __mul__(self, n):
        return self.__class__(self.data*n)
    __rmul__ = __mul__
    def __imul__(self, n):
        self.data *= n
        return self
    def append(self, item): self.data.append(item)
    def insert(self, i, item): self.data.insert(i, item)
    def pop(self, i=-1): return self.data.pop(i)
    def remove(self, item): self.data.remove(item)
    def count(self, item): return self.data.count(item)
    def index(self, item, *args): return self.data.index(item, *args)
    def reverse(self): self.data.reverse()
    def sort(self, *args, **kwds): self.data.sort(*args, **kwds)
    def extend(self, other):
        if isinstance(other, UserList):
            self.data.extend(other.data)
        else:
            self.data.extend(other)
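A brief sketch (not part of the original file) of subclassing the wrapper: override one method, inherit the rest:

    class LoggingList(UserList):
        def append(self, item):
            print 'appending', item     # Python 2 print statement
            UserList.append(self, item)

    ll = LoggingList([1, 2])
    ll.append(3)
    assert ll == [1, 2, 3] and len(ll) == 3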
@ -1,228 +0,0 @@
|
|||
#!/usr/bin/env python
## vim:ts=4:et:nowrap
"""A user-defined wrapper around string objects

Note: string objects have grown methods in Python 1.6
This module requires Python 1.6 or later.
"""
import sys
import collections

__all__ = ["UserString","MutableString"]

class UserString(collections.Sequence):
    def __init__(self, seq):
        if isinstance(seq, basestring):
            self.data = seq
        elif isinstance(seq, UserString):
            self.data = seq.data[:]
        else:
            self.data = str(seq)
    def __str__(self): return str(self.data)
    def __repr__(self): return repr(self.data)
    def __int__(self): return int(self.data)
    def __long__(self): return long(self.data)
    def __float__(self): return float(self.data)
    def __complex__(self): return complex(self.data)
    def __hash__(self): return hash(self.data)

    def __cmp__(self, string):
        if isinstance(string, UserString):
            return cmp(self.data, string.data)
        else:
            return cmp(self.data, string)
    def __contains__(self, char):
        return char in self.data

    def __len__(self): return len(self.data)
    def __getitem__(self, index): return self.__class__(self.data[index])
    def __getslice__(self, start, end):
        start = max(start, 0); end = max(end, 0)
        return self.__class__(self.data[start:end])

    def __add__(self, other):
        if isinstance(other, UserString):
            return self.__class__(self.data + other.data)
        elif isinstance(other, basestring):
            return self.__class__(self.data + other)
        else:
            return self.__class__(self.data + str(other))
    def __radd__(self, other):
        if isinstance(other, basestring):
            return self.__class__(other + self.data)
        else:
            return self.__class__(str(other) + self.data)
    def __mul__(self, n):
        return self.__class__(self.data*n)
    __rmul__ = __mul__
    def __mod__(self, args):
        return self.__class__(self.data % args)

    # the following methods are defined in alphabetical order:
    def capitalize(self): return self.__class__(self.data.capitalize())
    def center(self, width, *args):
        return self.__class__(self.data.center(width, *args))
    def count(self, sub, start=0, end=sys.maxint):
        return self.data.count(sub, start, end)
    def decode(self, encoding=None, errors=None): # XXX improve this?
        if encoding:
            if errors:
                return self.__class__(self.data.decode(encoding, errors))
            else:
                return self.__class__(self.data.decode(encoding))
        else:
            return self.__class__(self.data.decode())
    def encode(self, encoding=None, errors=None): # XXX improve this?
        if encoding:
            if errors:
                return self.__class__(self.data.encode(encoding, errors))
            else:
                return self.__class__(self.data.encode(encoding))
        else:
            return self.__class__(self.data.encode())
    def endswith(self, suffix, start=0, end=sys.maxint):
        return self.data.endswith(suffix, start, end)
    def expandtabs(self, tabsize=8):
        return self.__class__(self.data.expandtabs(tabsize))
    def find(self, sub, start=0, end=sys.maxint):
        return self.data.find(sub, start, end)
    def index(self, sub, start=0, end=sys.maxint):
        return self.data.index(sub, start, end)
    def isalpha(self): return self.data.isalpha()
    def isalnum(self): return self.data.isalnum()
    def isdecimal(self): return self.data.isdecimal()
    def isdigit(self): return self.data.isdigit()
    def islower(self): return self.data.islower()
    def isnumeric(self): return self.data.isnumeric()
    def isspace(self): return self.data.isspace()
    def istitle(self): return self.data.istitle()
    def isupper(self): return self.data.isupper()
    def join(self, seq): return self.data.join(seq)
    def ljust(self, width, *args):
        return self.__class__(self.data.ljust(width, *args))
    def lower(self): return self.__class__(self.data.lower())
    def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars))
    def partition(self, sep):
        return self.data.partition(sep)
    def replace(self, old, new, maxsplit=-1):
        return self.__class__(self.data.replace(old, new, maxsplit))
    def rfind(self, sub, start=0, end=sys.maxint):
        return self.data.rfind(sub, start, end)
    def rindex(self, sub, start=0, end=sys.maxint):
        return self.data.rindex(sub, start, end)
    def rjust(self, width, *args):
        return self.__class__(self.data.rjust(width, *args))
    def rpartition(self, sep):
        return self.data.rpartition(sep)
    def rstrip(self, chars=None): return self.__class__(self.data.rstrip(chars))
    def split(self, sep=None, maxsplit=-1):
        return self.data.split(sep, maxsplit)
    def rsplit(self, sep=None, maxsplit=-1):
        return self.data.rsplit(sep, maxsplit)
    def splitlines(self, keepends=0): return self.data.splitlines(keepends)
    def startswith(self, prefix, start=0, end=sys.maxint):
        return self.data.startswith(prefix, start, end)
    def strip(self, chars=None): return self.__class__(self.data.strip(chars))
    def swapcase(self): return self.__class__(self.data.swapcase())
    def title(self): return self.__class__(self.data.title())
    def translate(self, *args):
        return self.__class__(self.data.translate(*args))
    def upper(self): return self.__class__(self.data.upper())
    def zfill(self, width): return self.__class__(self.data.zfill(width))

class MutableString(UserString, collections.MutableSequence):
    """mutable string objects

    Python strings are immutable objects.  This has the advantage that
    strings may be used as dictionary keys.  If this property isn't needed
    and you insist on changing string values in place instead, you may cheat
    and use MutableString.

    But the purpose of this class is an educational one: to prevent
    people from inventing their own mutable string class derived
    from UserString and then forgetting to remove (override) the
    __hash__ method inherited from UserString.  This would lead to
    errors that would be very hard to track down.

    A faster and better solution is to rewrite your program using lists."""
    def __init__(self, string=""):
        from warnings import warnpy3k
        warnpy3k('the class UserString.MutableString has been removed in '
                 'Python 3.0', stacklevel=2)
        self.data = string

    # We inherit object.__hash__, so we must deny this explicitly
    __hash__ = None

    def __setitem__(self, index, sub):
        if isinstance(index, slice):
            if isinstance(sub, UserString):
                sub = sub.data
            elif not isinstance(sub, basestring):
                sub = str(sub)
            start, stop, step = index.indices(len(self.data))
            if step == -1:
                start, stop = stop+1, start+1
                sub = sub[::-1]
            elif step != 1:
                # XXX(twouters): I guess we should be reimplementing
                # the extended slice assignment/deletion algorithm here...
                raise TypeError, "invalid step in slicing assignment"
            start = min(start, stop)
            self.data = self.data[:start] + sub + self.data[stop:]
        else:
            if index < 0:
                index += len(self.data)
            if index < 0 or index >= len(self.data): raise IndexError
            self.data = self.data[:index] + sub + self.data[index+1:]
    def __delitem__(self, index):
        if isinstance(index, slice):
            start, stop, step = index.indices(len(self.data))
            if step == -1:
                start, stop = stop+1, start+1
            elif step != 1:
                # XXX(twouters): see same block in __setitem__
                raise TypeError, "invalid step in slicing deletion"
            start = min(start, stop)
            self.data = self.data[:start] + self.data[stop:]
        else:
            if index < 0:
                index += len(self.data)
            if index < 0 or index >= len(self.data): raise IndexError
            self.data = self.data[:index] + self.data[index+1:]
    def __setslice__(self, start, end, sub):
        start = max(start, 0); end = max(end, 0)
        if isinstance(sub, UserString):
            self.data = self.data[:start]+sub.data+self.data[end:]
        elif isinstance(sub, basestring):
            self.data = self.data[:start]+sub+self.data[end:]
        else:
            self.data = self.data[:start]+str(sub)+self.data[end:]
    def __delslice__(self, start, end):
        start = max(start, 0); end = max(end, 0)
        self.data = self.data[:start] + self.data[end:]
    def immutable(self):
        return UserString(self.data)
    def __iadd__(self, other):
        if isinstance(other, UserString):
            self.data += other.data
        elif isinstance(other, basestring):
            self.data += other
        else:
            self.data += str(other)
        return self
    def __imul__(self, n):
        self.data *= n
        return self
    def insert(self, index, value):
        self[index:index] = value

if __name__ == "__main__":
    # execute the regression test to stdout, if called as a script:
    import os
    called_in_dir, called_as = os.path.split(sys.argv[0])
    called_as, py = os.path.splitext(called_as)
    if '-q' in sys.argv:
        from test import test_support
        test_support.verbose = 0
    __import__('test.test_' + called_as.lower())
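Because every string method above re-wraps its result in self.__class__, operations on a UserString (or a subclass) stay in that class rather than decaying to plain str. A short sketch on Python 2:

    from UserString import UserString

    s = UserString("hello world")
    t = s.upper()          # another UserString, not a plain str
    print t                # HELLO WORLD
    print s + "!"          # __add__ wraps the concatenation too
    print "world" in s     # __contains__ delegates to the str in s.data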
@@ -1,170 +0,0 @@
"""Load / save to libwww-perl (LWP) format files.
|
||||
|
||||
Actually, the format is slightly extended from that used by LWP's
|
||||
(libwww-perl's) HTTP::Cookies, to avoid losing some RFC 2965 information
|
||||
not recorded by LWP.
|
||||
|
||||
It uses the version string "2.0", though really there isn't an LWP Cookies
|
||||
2.0 format. This indicates that there is extra information in here
|
||||
(domain_dot and # port_spec) while still being compatible with
|
||||
libwww-perl, I hope.
|
||||
|
||||
"""
|
||||
|
||||
import time, re
|
||||
from cookielib import (_warn_unhandled_exception, FileCookieJar, LoadError,
|
||||
Cookie, MISSING_FILENAME_TEXT,
|
||||
join_header_words, split_header_words,
|
||||
iso2time, time2isoz)
|
||||
|
||||
def lwp_cookie_str(cookie):
|
||||
"""Return string representation of Cookie in an the LWP cookie file format.
|
||||
|
||||
Actually, the format is extended a bit -- see module docstring.
|
||||
|
||||
"""
|
||||
h = [(cookie.name, cookie.value),
|
||||
("path", cookie.path),
|
||||
("domain", cookie.domain)]
|
||||
if cookie.port is not None: h.append(("port", cookie.port))
|
||||
if cookie.path_specified: h.append(("path_spec", None))
|
||||
if cookie.port_specified: h.append(("port_spec", None))
|
||||
if cookie.domain_initial_dot: h.append(("domain_dot", None))
|
||||
if cookie.secure: h.append(("secure", None))
|
||||
if cookie.expires: h.append(("expires",
|
||||
time2isoz(float(cookie.expires))))
|
||||
if cookie.discard: h.append(("discard", None))
|
||||
if cookie.comment: h.append(("comment", cookie.comment))
|
||||
if cookie.comment_url: h.append(("commenturl", cookie.comment_url))
|
||||
|
||||
keys = cookie._rest.keys()
|
||||
keys.sort()
|
||||
for k in keys:
|
||||
h.append((k, str(cookie._rest[k])))
|
||||
|
||||
h.append(("version", str(cookie.version)))
|
||||
|
||||
return join_header_words([h])
|
||||
|
||||
class LWPCookieJar(FileCookieJar):
|
||||
"""
|
||||
The LWPCookieJar saves a sequence of "Set-Cookie3" lines.
|
||||
"Set-Cookie3" is the format used by the libwww-perl libary, not known
|
||||
to be compatible with any browser, but which is easy to read and
|
||||
doesn't lose information about RFC 2965 cookies.
|
||||
|
||||
Additional methods
|
||||
|
||||
as_lwp_str(ignore_discard=True, ignore_expired=True)
|
||||
|
||||
"""
|
||||
|
||||
def as_lwp_str(self, ignore_discard=True, ignore_expires=True):
|
||||
"""Return cookies as a string of "\\n"-separated "Set-Cookie3" headers.
|
||||
|
||||
ignore_discard and ignore_expires: see docstring for FileCookieJar.save
|
||||
|
||||
"""
|
||||
now = time.time()
|
||||
r = []
|
||||
for cookie in self:
|
||||
if not ignore_discard and cookie.discard:
|
||||
continue
|
||||
if not ignore_expires and cookie.is_expired(now):
|
||||
continue
|
||||
r.append("Set-Cookie3: %s" % lwp_cookie_str(cookie))
|
||||
return "\n".join(r+[""])
|
||||
|
||||
def save(self, filename=None, ignore_discard=False, ignore_expires=False):
|
||||
if filename is None:
|
||||
if self.filename is not None: filename = self.filename
|
||||
else: raise ValueError(MISSING_FILENAME_TEXT)
|
||||
|
||||
f = open(filename, "w")
|
||||
try:
|
||||
# There really isn't an LWP Cookies 2.0 format, but this indicates
|
||||
# that there is extra information in here (domain_dot and
|
||||
# port_spec) while still being compatible with libwww-perl, I hope.
|
||||
f.write("#LWP-Cookies-2.0\n")
|
||||
f.write(self.as_lwp_str(ignore_discard, ignore_expires))
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
def _really_load(self, f, filename, ignore_discard, ignore_expires):
|
||||
magic = f.readline()
|
||||
if not re.search(self.magic_re, magic):
|
||||
msg = ("%r does not look like a Set-Cookie3 (LWP) format "
|
||||
"file" % filename)
|
||||
raise LoadError(msg)
|
||||
|
||||
now = time.time()
|
||||
|
||||
header = "Set-Cookie3:"
|
||||
boolean_attrs = ("port_spec", "path_spec", "domain_dot",
|
||||
"secure", "discard")
|
||||
value_attrs = ("version",
|
||||
"port", "path", "domain",
|
||||
"expires",
|
||||
"comment", "commenturl")
|
||||
|
||||
try:
|
||||
while 1:
|
||||
line = f.readline()
|
||||
if line == "": break
|
||||
if not line.startswith(header):
|
||||
continue
|
||||
line = line[len(header):].strip()
|
||||
|
||||
for data in split_header_words([line]):
|
||||
name, value = data[0]
|
||||
standard = {}
|
||||
rest = {}
|
||||
for k in boolean_attrs:
|
||||
standard[k] = False
|
||||
for k, v in data[1:]:
|
||||
if k is not None:
|
||||
lc = k.lower()
|
||||
else:
|
||||
lc = None
|
||||
# don't lose case distinction for unknown fields
|
||||
if (lc in value_attrs) or (lc in boolean_attrs):
|
||||
k = lc
|
||||
if k in boolean_attrs:
|
||||
if v is None: v = True
|
||||
standard[k] = v
|
||||
elif k in value_attrs:
|
||||
standard[k] = v
|
||||
else:
|
||||
rest[k] = v
|
||||
|
||||
h = standard.get
|
||||
expires = h("expires")
|
||||
discard = h("discard")
|
||||
if expires is not None:
|
||||
expires = iso2time(expires)
|
||||
if expires is None:
|
||||
discard = True
|
||||
domain = h("domain")
|
||||
domain_specified = domain.startswith(".")
|
||||
c = Cookie(h("version"), name, value,
|
||||
h("port"), h("port_spec"),
|
||||
domain, domain_specified, h("domain_dot"),
|
||||
h("path"), h("path_spec"),
|
||||
h("secure"),
|
||||
expires,
|
||||
discard,
|
||||
h("comment"),
|
||||
h("commenturl"),
|
||||
rest)
|
||||
if not ignore_discard and c.discard:
|
||||
continue
|
||||
if not ignore_expires and c.is_expired(now):
|
||||
continue
|
||||
self.set_cookie(c)
|
||||
|
||||
except IOError:
|
||||
raise
|
||||
except Exception:
|
||||
_warn_unhandled_exception()
|
||||
raise LoadError("invalid Set-Cookie3 format file %r: %r" %
|
||||
(filename, line))
|
||||
|
|
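In Python 2 this class is reachable as cookielib.LWPCookieJar. A minimal save/load round-trip through the Set-Cookie3 format, assuming a reachable cookie-setting URL and an illustrative file name:

    import cookielib, urllib2

    jar = cookielib.LWPCookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar))
    opener.open("http://example.com/")            # any URL that sets cookies
    jar.save("cookies.lwp", ignore_discard=True)  # writes "#LWP-Cookies-2.0" first
    jar.load("cookies.lwp", ignore_discard=True)  # _really_load parses it back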
@@ -1,149 +0,0 @@
"""Mozilla / Netscape cookie loading / saving."""
|
||||
|
||||
import re, time
|
||||
|
||||
from cookielib import (_warn_unhandled_exception, FileCookieJar, LoadError,
|
||||
Cookie, MISSING_FILENAME_TEXT)
|
||||
|
||||
class MozillaCookieJar(FileCookieJar):
|
||||
"""
|
||||
|
||||
WARNING: you may want to backup your browser's cookies file if you use
|
||||
this class to save cookies. I *think* it works, but there have been
|
||||
bugs in the past!
|
||||
|
||||
This class differs from CookieJar only in the format it uses to save and
|
||||
load cookies to and from a file. This class uses the Mozilla/Netscape
|
||||
`cookies.txt' format. lynx uses this file format, too.
|
||||
|
||||
Don't expect cookies saved while the browser is running to be noticed by
|
||||
the browser (in fact, Mozilla on unix will overwrite your saved cookies if
|
||||
you change them on disk while it's running; on Windows, you probably can't
|
||||
save at all while the browser is running).
|
||||
|
||||
Note that the Mozilla/Netscape format will downgrade RFC2965 cookies to
|
||||
Netscape cookies on saving.
|
||||
|
||||
In particular, the cookie version and port number information is lost,
|
||||
together with information about whether or not Path, Port and Discard were
|
||||
specified by the Set-Cookie2 (or Set-Cookie) header, and whether or not the
|
||||
domain as set in the HTTP header started with a dot (yes, I'm aware some
|
||||
domains in Netscape files start with a dot and some don't -- trust me, you
|
||||
really don't want to know any more about this).
|
||||
|
||||
Note that though Mozilla and Netscape use the same format, they use
|
||||
slightly different headers. The class saves cookies using the Netscape
|
||||
header by default (Mozilla can cope with that).
|
||||
|
||||
"""
|
||||
magic_re = "#( Netscape)? HTTP Cookie File"
|
||||
header = """\
|
||||
# Netscape HTTP Cookie File
|
||||
# http://www.netscape.com/newsref/std/cookie_spec.html
|
||||
# This is a generated file! Do not edit.
|
||||
|
||||
"""
|
||||
|
||||
def _really_load(self, f, filename, ignore_discard, ignore_expires):
|
||||
now = time.time()
|
||||
|
||||
magic = f.readline()
|
||||
if not re.search(self.magic_re, magic):
|
||||
f.close()
|
||||
raise LoadError(
|
||||
"%r does not look like a Netscape format cookies file" %
|
||||
filename)
|
||||
|
||||
try:
|
||||
while 1:
|
||||
line = f.readline()
|
||||
if line == "": break
|
||||
|
||||
# last field may be absent, so keep any trailing tab
|
||||
if line.endswith("\n"): line = line[:-1]
|
||||
|
||||
# skip comments and blank lines XXX what is $ for?
|
||||
if (line.strip().startswith(("#", "$")) or
|
||||
line.strip() == ""):
|
||||
continue
|
||||
|
||||
domain, domain_specified, path, secure, expires, name, value = \
|
||||
line.split("\t")
|
||||
secure = (secure == "TRUE")
|
||||
domain_specified = (domain_specified == "TRUE")
|
||||
if name == "":
|
||||
# cookies.txt regards 'Set-Cookie: foo' as a cookie
|
||||
# with no name, whereas cookielib regards it as a
|
||||
# cookie with no value.
|
||||
name = value
|
||||
value = None
|
||||
|
||||
initial_dot = domain.startswith(".")
|
||||
assert domain_specified == initial_dot
|
||||
|
||||
discard = False
|
||||
if expires == "":
|
||||
expires = None
|
||||
discard = True
|
||||
|
||||
# assume path_specified is false
|
||||
c = Cookie(0, name, value,
|
||||
None, False,
|
||||
domain, domain_specified, initial_dot,
|
||||
path, False,
|
||||
secure,
|
||||
expires,
|
||||
discard,
|
||||
None,
|
||||
None,
|
||||
{})
|
||||
if not ignore_discard and c.discard:
|
||||
continue
|
||||
if not ignore_expires and c.is_expired(now):
|
||||
continue
|
||||
self.set_cookie(c)
|
||||
|
||||
except IOError:
|
||||
raise
|
||||
except Exception:
|
||||
_warn_unhandled_exception()
|
||||
raise LoadError("invalid Netscape format cookies file %r: %r" %
|
||||
(filename, line))
|
||||
|
||||
def save(self, filename=None, ignore_discard=False, ignore_expires=False):
|
||||
if filename is None:
|
||||
if self.filename is not None: filename = self.filename
|
||||
else: raise ValueError(MISSING_FILENAME_TEXT)
|
||||
|
||||
f = open(filename, "w")
|
||||
try:
|
||||
f.write(self.header)
|
||||
now = time.time()
|
||||
for cookie in self:
|
||||
if not ignore_discard and cookie.discard:
|
||||
continue
|
||||
if not ignore_expires and cookie.is_expired(now):
|
||||
continue
|
||||
if cookie.secure: secure = "TRUE"
|
||||
else: secure = "FALSE"
|
||||
if cookie.domain.startswith("."): initial_dot = "TRUE"
|
||||
else: initial_dot = "FALSE"
|
||||
if cookie.expires is not None:
|
||||
expires = str(cookie.expires)
|
||||
else:
|
||||
expires = ""
|
||||
if cookie.value is None:
|
||||
# cookies.txt regards 'Set-Cookie: foo' as a cookie
|
||||
# with no name, whereas cookielib regards it as a
|
||||
# cookie with no value.
|
||||
name = ""
|
||||
value = cookie.name
|
||||
else:
|
||||
name = cookie.name
|
||||
value = cookie.value
|
||||
f.write(
|
||||
"\t".join([cookie.domain, initial_dot, cookie.path,
|
||||
secure, expires, name, value])+
|
||||
"\n")
|
||||
finally:
|
||||
f.close()
|
||||
|
|
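The same FileCookieJar interface applies; only the on-disk format differs, with each cookie stored as seven tab-separated fields. Reading an existing Netscape-format file looks like this on Python 2 (the file name is an assumption):

    import cookielib

    jar = cookielib.MozillaCookieJar()
    jar.load("cookies.txt")      # file must start with the magic header line
    for c in jar:
        # each data line became a Cookie built from the seven fields
        print c.domain, c.path, c.secure, c.name, c.value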
@@ -1,4 +0,0 @@

import webbrowser

webbrowser.open("http://xkcd.com/353/")
@@ -1,85 +0,0 @@
"""Generic interface to all dbm clones.
|
||||
|
||||
Instead of
|
||||
|
||||
import dbm
|
||||
d = dbm.open(file, 'w', 0666)
|
||||
|
||||
use
|
||||
|
||||
import anydbm
|
||||
d = anydbm.open(file, 'w')
|
||||
|
||||
The returned object is a dbhash, gdbm, dbm or dumbdbm object,
|
||||
dependent on the type of database being opened (determined by whichdb
|
||||
module) in the case of an existing dbm. If the dbm does not exist and
|
||||
the create or new flag ('c' or 'n') was specified, the dbm type will
|
||||
be determined by the availability of the modules (tested in the above
|
||||
order).
|
||||
|
||||
It has the following interface (key and data are strings):
|
||||
|
||||
d[key] = data # store data at key (may override data at
|
||||
# existing key)
|
||||
data = d[key] # retrieve data at key (raise KeyError if no
|
||||
# such key)
|
||||
del d[key] # delete data stored at key (raises KeyError
|
||||
# if no such key)
|
||||
flag = key in d # true if the key exists
|
||||
list = d.keys() # return a list of all existing keys (slow!)
|
||||
|
||||
Future versions may change the order in which implementations are
|
||||
tested for existence, and add interfaces to other dbm-like
|
||||
implementations.
|
||||
"""
|
||||
|
||||
class error(Exception):
|
||||
pass
|
||||
|
||||
_names = ['dbhash', 'gdbm', 'dbm', 'dumbdbm']
|
||||
_errors = [error]
|
||||
_defaultmod = None
|
||||
|
||||
for _name in _names:
|
||||
try:
|
||||
_mod = __import__(_name)
|
||||
except ImportError:
|
||||
continue
|
||||
if not _defaultmod:
|
||||
_defaultmod = _mod
|
||||
_errors.append(_mod.error)
|
||||
|
||||
if not _defaultmod:
|
||||
raise ImportError, "no dbm clone found; tried %s" % _names
|
||||
|
||||
error = tuple(_errors)
|
||||
|
||||
def open(file, flag='r', mode=0666):
|
||||
"""Open or create database at path given by *file*.
|
||||
|
||||
Optional argument *flag* can be 'r' (default) for read-only access, 'w'
|
||||
for read-write access of an existing database, 'c' for read-write access
|
||||
to a new or existing database, and 'n' for read-write access to a new
|
||||
database.
|
||||
|
||||
Note: 'r' and 'w' fail if the database doesn't exist; 'c' creates it
|
||||
only if it doesn't exist; and 'n' always creates a new database.
|
||||
"""
|
||||
|
||||
# guess the type of an existing database
|
||||
from whichdb import whichdb
|
||||
result=whichdb(file)
|
||||
if result is None:
|
||||
# db doesn't exist
|
||||
if 'c' in flag or 'n' in flag:
|
||||
# file doesn't exist and the new
|
||||
# flag was used so use default type
|
||||
mod = _defaultmod
|
||||
else:
|
||||
raise error, "need 'c' or 'n' flag to open new db"
|
||||
elif result == "":
|
||||
# db type cannot be determined
|
||||
raise error, "db type could not be determined"
|
||||
else:
|
||||
mod = __import__(result)
|
||||
return mod.open(file, flag, mode)
|
||||
|
|
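The docstring's interface maps directly onto code; both keys and values must be byte strings. A short Python 2 sketch (the file name is illustrative):

    import anydbm

    d = anydbm.open("cache.db", 'c')   # 'c': create the file if missing
    d['www.python.org'] = '82.94.164.162'
    print d['www.python.org']
    print 'www.cnn.com' in d           # False: membership test, no exception
    d.close()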
@@ -1,65 +0,0 @@
"""
|
||||
atexit.py - allow programmer to define multiple exit functions to be executed
|
||||
upon normal program termination.
|
||||
|
||||
One public function, register, is defined.
|
||||
"""
|
||||
|
||||
__all__ = ["register"]
|
||||
|
||||
import sys
|
||||
|
||||
_exithandlers = []
|
||||
def _run_exitfuncs():
|
||||
"""run any registered exit functions
|
||||
|
||||
_exithandlers is traversed in reverse order so functions are executed
|
||||
last in, first out.
|
||||
"""
|
||||
|
||||
exc_info = None
|
||||
while _exithandlers:
|
||||
func, targs, kargs = _exithandlers.pop()
|
||||
try:
|
||||
func(*targs, **kargs)
|
||||
except SystemExit:
|
||||
exc_info = sys.exc_info()
|
||||
except:
|
||||
import traceback
|
||||
print >> sys.stderr, "Error in atexit._run_exitfuncs:"
|
||||
traceback.print_exc()
|
||||
exc_info = sys.exc_info()
|
||||
|
||||
if exc_info is not None:
|
||||
raise exc_info[0], exc_info[1], exc_info[2]
|
||||
|
||||
|
||||
def register(func, *targs, **kargs):
|
||||
"""register a function to be executed upon normal program termination
|
||||
|
||||
func - function to be called at exit
|
||||
targs - optional arguments to pass to func
|
||||
kargs - optional keyword arguments to pass to func
|
||||
|
||||
func is returned to facilitate usage as a decorator.
|
||||
"""
|
||||
_exithandlers.append((func, targs, kargs))
|
||||
return func
|
||||
|
||||
if hasattr(sys, "exitfunc"):
|
||||
# Assume it's another registered exit function - append it to our list
|
||||
register(sys.exitfunc)
|
||||
sys.exitfunc = _run_exitfuncs
|
||||
|
||||
if __name__ == "__main__":
|
||||
def x1():
|
||||
print "running x1"
|
||||
def x2(n):
|
||||
print "running x2(%r)" % (n,)
|
||||
def x3(n, kwd=None):
|
||||
print "running x3(%r, kwd=%r)" % (n, kwd)
|
||||
|
||||
register(x1)
|
||||
register(x2, 12)
|
||||
register(x3, 5, "bar")
|
||||
register(x3, "no kwd args")
|
||||
|
|
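Since register returns func, it doubles as a decorator, and because _run_exitfuncs pops the handler list, handlers run last-in, first-out. For example:

    import atexit

    @atexit.register           # registered first, so it runs second
    def flush_logs():
        print "flushing logs"

    @atexit.register           # registered last, so it runs first
    def goodbye():
        print "goodbye"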
@@ -1,260 +0,0 @@
"""Classes for manipulating audio devices (currently only for Sun and SGI)"""
|
||||
from warnings import warnpy3k
|
||||
warnpy3k("the audiodev module has been removed in Python 3.0", stacklevel=2)
|
||||
del warnpy3k
|
||||
|
||||
__all__ = ["error","AudioDev"]
|
||||
|
||||
class error(Exception):
|
||||
pass
|
||||
|
||||
class Play_Audio_sgi:
|
||||
# Private instance variables
|
||||
## if 0: access frameratelist, nchannelslist, sampwidthlist, oldparams, \
|
||||
## params, config, inited_outrate, inited_width, \
|
||||
## inited_nchannels, port, converter, classinited: private
|
||||
|
||||
classinited = 0
|
||||
frameratelist = nchannelslist = sampwidthlist = None
|
||||
|
||||
def initclass(self):
|
||||
import AL
|
||||
self.frameratelist = [
|
||||
(48000, AL.RATE_48000),
|
||||
(44100, AL.RATE_44100),
|
||||
(32000, AL.RATE_32000),
|
||||
(22050, AL.RATE_22050),
|
||||
(16000, AL.RATE_16000),
|
||||
(11025, AL.RATE_11025),
|
||||
( 8000, AL.RATE_8000),
|
||||
]
|
||||
self.nchannelslist = [
|
||||
(1, AL.MONO),
|
||||
(2, AL.STEREO),
|
||||
(4, AL.QUADRO),
|
||||
]
|
||||
self.sampwidthlist = [
|
||||
(1, AL.SAMPLE_8),
|
||||
(2, AL.SAMPLE_16),
|
||||
(3, AL.SAMPLE_24),
|
||||
]
|
||||
self.classinited = 1
|
||||
|
||||
def __init__(self):
|
||||
import al, AL
|
||||
if not self.classinited:
|
||||
self.initclass()
|
||||
self.oldparams = []
|
||||
self.params = [AL.OUTPUT_RATE, 0]
|
||||
self.config = al.newconfig()
|
||||
self.inited_outrate = 0
|
||||
self.inited_width = 0
|
||||
self.inited_nchannels = 0
|
||||
self.converter = None
|
||||
self.port = None
|
||||
return
|
||||
|
||||
def __del__(self):
|
||||
if self.port:
|
||||
self.stop()
|
||||
if self.oldparams:
|
||||
import al, AL
|
||||
al.setparams(AL.DEFAULT_DEVICE, self.oldparams)
|
||||
self.oldparams = []
|
||||
|
||||
def wait(self):
|
||||
if not self.port:
|
||||
return
|
||||
import time
|
||||
while self.port.getfilled() > 0:
|
||||
time.sleep(0.1)
|
||||
self.stop()
|
||||
|
||||
def stop(self):
|
||||
if self.port:
|
||||
self.port.closeport()
|
||||
self.port = None
|
||||
if self.oldparams:
|
||||
import al, AL
|
||||
al.setparams(AL.DEFAULT_DEVICE, self.oldparams)
|
||||
self.oldparams = []
|
||||
|
||||
def setoutrate(self, rate):
|
||||
for (raw, cooked) in self.frameratelist:
|
||||
if rate == raw:
|
||||
self.params[1] = cooked
|
||||
self.inited_outrate = 1
|
||||
break
|
||||
else:
|
||||
raise error, 'bad output rate'
|
||||
|
||||
def setsampwidth(self, width):
|
||||
for (raw, cooked) in self.sampwidthlist:
|
||||
if width == raw:
|
||||
self.config.setwidth(cooked)
|
||||
self.inited_width = 1
|
||||
break
|
||||
else:
|
||||
if width == 0:
|
||||
import AL
|
||||
self.inited_width = 0
|
||||
self.config.setwidth(AL.SAMPLE_16)
|
||||
self.converter = self.ulaw2lin
|
||||
else:
|
||||
raise error, 'bad sample width'
|
||||
|
||||
def setnchannels(self, nchannels):
|
||||
for (raw, cooked) in self.nchannelslist:
|
||||
if nchannels == raw:
|
||||
self.config.setchannels(cooked)
|
||||
self.inited_nchannels = 1
|
||||
break
|
||||
else:
|
||||
raise error, 'bad # of channels'
|
||||
|
||||
def writeframes(self, data):
|
||||
if not (self.inited_outrate and self.inited_nchannels):
|
||||
raise error, 'params not specified'
|
||||
if not self.port:
|
||||
import al, AL
|
||||
self.port = al.openport('Python', 'w', self.config)
|
||||
self.oldparams = self.params[:]
|
||||
al.getparams(AL.DEFAULT_DEVICE, self.oldparams)
|
||||
al.setparams(AL.DEFAULT_DEVICE, self.params)
|
||||
if self.converter:
|
||||
data = self.converter(data)
|
||||
self.port.writesamps(data)
|
||||
|
||||
def getfilled(self):
|
||||
if self.port:
|
||||
return self.port.getfilled()
|
||||
else:
|
||||
return 0
|
||||
|
||||
def getfillable(self):
|
||||
if self.port:
|
||||
return self.port.getfillable()
|
||||
else:
|
||||
return self.config.getqueuesize()
|
||||
|
||||
# private methods
|
||||
## if 0: access *: private
|
||||
|
||||
def ulaw2lin(self, data):
|
||||
import audioop
|
||||
return audioop.ulaw2lin(data, 2)
|
||||
|
||||
class Play_Audio_sun:
|
||||
## if 0: access outrate, sampwidth, nchannels, inited_outrate, inited_width, \
|
||||
## inited_nchannels, converter: private
|
||||
|
||||
def __init__(self):
|
||||
self.outrate = 0
|
||||
self.sampwidth = 0
|
||||
self.nchannels = 0
|
||||
self.inited_outrate = 0
|
||||
self.inited_width = 0
|
||||
self.inited_nchannels = 0
|
||||
self.converter = None
|
||||
self.port = None
|
||||
return
|
||||
|
||||
def __del__(self):
|
||||
self.stop()
|
||||
|
||||
def setoutrate(self, rate):
|
||||
self.outrate = rate
|
||||
self.inited_outrate = 1
|
||||
|
||||
def setsampwidth(self, width):
|
||||
self.sampwidth = width
|
||||
self.inited_width = 1
|
||||
|
||||
def setnchannels(self, nchannels):
|
||||
self.nchannels = nchannels
|
||||
self.inited_nchannels = 1
|
||||
|
||||
def writeframes(self, data):
|
||||
if not (self.inited_outrate and self.inited_width and self.inited_nchannels):
|
||||
raise error, 'params not specified'
|
||||
if not self.port:
|
||||
import sunaudiodev, SUNAUDIODEV
|
||||
self.port = sunaudiodev.open('w')
|
||||
info = self.port.getinfo()
|
||||
info.o_sample_rate = self.outrate
|
||||
info.o_channels = self.nchannels
|
||||
if self.sampwidth == 0:
|
||||
info.o_precision = 8
|
||||
self.o_encoding = SUNAUDIODEV.ENCODING_ULAW
|
||||
# XXX Hack, hack -- leave defaults
|
||||
else:
|
||||
info.o_precision = 8 * self.sampwidth
|
||||
info.o_encoding = SUNAUDIODEV.ENCODING_LINEAR
|
||||
self.port.setinfo(info)
|
||||
if self.converter:
|
||||
data = self.converter(data)
|
||||
self.port.write(data)
|
||||
|
||||
def wait(self):
|
||||
if not self.port:
|
||||
return
|
||||
self.port.drain()
|
||||
self.stop()
|
||||
|
||||
def stop(self):
|
||||
if self.port:
|
||||
self.port.flush()
|
||||
self.port.close()
|
||||
self.port = None
|
||||
|
||||
def getfilled(self):
|
||||
if self.port:
|
||||
return self.port.obufcount()
|
||||
else:
|
||||
return 0
|
||||
|
||||
## # Nobody remembers what this method does, and it's broken. :-(
|
||||
## def getfillable(self):
|
||||
## return BUFFERSIZE - self.getfilled()
|
||||
|
||||
def AudioDev():
|
||||
# Dynamically try to import and use a platform specific module.
|
||||
try:
|
||||
import al
|
||||
except ImportError:
|
||||
try:
|
||||
import sunaudiodev
|
||||
return Play_Audio_sun()
|
||||
except ImportError:
|
||||
try:
|
||||
import Audio_mac
|
||||
except ImportError:
|
||||
raise error, 'no audio device'
|
||||
else:
|
||||
return Audio_mac.Play_Audio_mac()
|
||||
else:
|
||||
return Play_Audio_sgi()
|
||||
|
||||
def test(fn = None):
|
||||
import sys
|
||||
if sys.argv[1:]:
|
||||
fn = sys.argv[1]
|
||||
else:
|
||||
fn = 'f:just samples:just.aif'
|
||||
import aifc
|
||||
af = aifc.open(fn, 'r')
|
||||
print fn, af.getparams()
|
||||
p = AudioDev()
|
||||
p.setoutrate(af.getframerate())
|
||||
p.setsampwidth(af.getsampwidth())
|
||||
p.setnchannels(af.getnchannels())
|
||||
BUFSIZ = af.getframerate()/af.getsampwidth()/af.getnchannels()
|
||||
while 1:
|
||||
data = af.readframes(BUFSIZ)
|
||||
if not data: break
|
||||
print len(data)
|
||||
p.writeframes(data)
|
||||
p.wait()
|
||||
|
||||
if __name__ == '__main__':
|
||||
test()
|
||||
|
|
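The AudioDev factory above hides the platform probing; playback is then the same three set calls used by test(). A sketch, only meaningful on the legacy SGI/Sun/Mac builds this module supports (the file name is an assumption):

    import audiodev, aifc

    af = aifc.open("sample.aiff", 'r')
    p = audiodev.AudioDev()                 # picks the SGI, Sun or Mac backend
    p.setoutrate(af.getframerate())
    p.setsampwidth(af.getsampwidth())
    p.setnchannels(af.getnchannels())
    p.writeframes(af.readframes(af.getnframes()))
    p.wait()                                # block until the queue drains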
@@ -1,360 +0,0 @@
#! /usr/bin/env python

"""RFC 3548: Base16, Base32, Base64 Data Encodings"""

# Modified 04-Oct-1995 by Jack Jansen to use binascii module
# Modified 30-Dec-2003 by Barry Warsaw to add full RFC 3548 support

import re
import struct
import binascii


__all__ = [
    # Legacy interface exports traditional RFC 1521 Base64 encodings
    'encode', 'decode', 'encodestring', 'decodestring',
    # Generalized interface for other encodings
    'b64encode', 'b64decode', 'b32encode', 'b32decode',
    'b16encode', 'b16decode',
    # Standard Base64 encoding
    'standard_b64encode', 'standard_b64decode',
    # Some common Base64 alternatives.  As referenced by RFC 3458, see thread
    # starting at:
    #
    # http://zgp.org/pipermail/p2p-hackers/2001-September/000316.html
    'urlsafe_b64encode', 'urlsafe_b64decode',
    ]

_translation = [chr(_x) for _x in range(256)]
EMPTYSTRING = ''


def _translate(s, altchars):
    translation = _translation[:]
    for k, v in altchars.items():
        translation[ord(k)] = v
    return s.translate(''.join(translation))



# Base64 encoding/decoding uses binascii

def b64encode(s, altchars=None):
    """Encode a string using Base64.

    s is the string to encode.  Optional altchars must be a string of at least
    length 2 (additional characters are ignored) which specifies an
    alternative alphabet for the '+' and '/' characters.  This allows an
    application to e.g. generate url or filesystem safe Base64 strings.

    The encoded string is returned.
    """
    # Strip off the trailing newline
    encoded = binascii.b2a_base64(s)[:-1]
    if altchars is not None:
        return _translate(encoded, {'+': altchars[0], '/': altchars[1]})
    return encoded


def b64decode(s, altchars=None):
    """Decode a Base64 encoded string.

    s is the string to decode.  Optional altchars must be a string of at least
    length 2 (additional characters are ignored) which specifies the
    alternative alphabet used instead of the '+' and '/' characters.

    The decoded string is returned.  A TypeError is raised if s is
    incorrectly padded or if there are non-alphabet characters present in the
    string.
    """
    if altchars is not None:
        s = _translate(s, {altchars[0]: '+', altchars[1]: '/'})
    try:
        return binascii.a2b_base64(s)
    except binascii.Error, msg:
        # Transform this exception for consistency
        raise TypeError(msg)


def standard_b64encode(s):
    """Encode a string using the standard Base64 alphabet.

    s is the string to encode.  The encoded string is returned.
    """
    return b64encode(s)

def standard_b64decode(s):
    """Decode a string encoded with the standard Base64 alphabet.

    s is the string to decode.  The decoded string is returned.  A TypeError
    is raised if the string is incorrectly padded or if there are non-alphabet
    characters present in the string.
    """
    return b64decode(s)

def urlsafe_b64encode(s):
    """Encode a string using a url-safe Base64 alphabet.

    s is the string to encode.  The encoded string is returned.  The alphabet
    uses '-' instead of '+' and '_' instead of '/'.
    """
    return b64encode(s, '-_')

def urlsafe_b64decode(s):
    """Decode a string encoded with a url-safe Base64 alphabet.

    s is the string to decode.  The decoded string is returned.  A TypeError
    is raised if the string is incorrectly padded or if there are non-alphabet
    characters present in the string.

    The alphabet uses '-' instead of '+' and '_' instead of '/'.
    """
    return b64decode(s, '-_')



# Base32 encoding/decoding must be done in Python
_b32alphabet = {
    0: 'A',  9: 'J', 18: 'S', 27: '3',
    1: 'B', 10: 'K', 19: 'T', 28: '4',
    2: 'C', 11: 'L', 20: 'U', 29: '5',
    3: 'D', 12: 'M', 21: 'V', 30: '6',
    4: 'E', 13: 'N', 22: 'W', 31: '7',
    5: 'F', 14: 'O', 23: 'X',
    6: 'G', 15: 'P', 24: 'Y',
    7: 'H', 16: 'Q', 25: 'Z',
    8: 'I', 17: 'R', 26: '2',
    }

_b32tab = _b32alphabet.items()
_b32tab.sort()
_b32tab = [v for k, v in _b32tab]
_b32rev = dict([(v, long(k)) for k, v in _b32alphabet.items()])


def b32encode(s):
    """Encode a string using Base32.

    s is the string to encode.  The encoded string is returned.
    """
    parts = []
    quanta, leftover = divmod(len(s), 5)
    # Pad the last quantum with zero bits if necessary
    if leftover:
        s += ('\0' * (5 - leftover))
        quanta += 1
    for i in range(quanta):
        # c1 and c2 are 16 bits wide, c3 is 8 bits wide.  The intent of this
        # code is to process the 40 bits in units of 5 bits.  So we take the 1
        # leftover bit of c1 and tack it onto c2.  Then we take the 2 leftover
        # bits of c2 and tack them onto c3.  The shifts and masks are intended
        # to give us values of exactly 5 bits in width.
        c1, c2, c3 = struct.unpack('!HHB', s[i*5:(i+1)*5])
        c2 += (c1 & 1) << 16 # 17 bits wide
        c3 += (c2 & 3) << 8  # 10 bits wide
        parts.extend([_b32tab[c1 >> 11],         # bits 1 - 5
                      _b32tab[(c1 >> 6) & 0x1f], # bits 6 - 10
                      _b32tab[(c1 >> 1) & 0x1f], # bits 11 - 15
                      _b32tab[c2 >> 12],         # bits 16 - 20 (1 - 5)
                      _b32tab[(c2 >> 7) & 0x1f], # bits 21 - 25 (6 - 10)
                      _b32tab[(c2 >> 2) & 0x1f], # bits 26 - 30 (11 - 15)
                      _b32tab[c3 >> 5],          # bits 31 - 35 (1 - 5)
                      _b32tab[c3 & 0x1f],        # bits 36 - 40 (1 - 5)
                      ])
    encoded = EMPTYSTRING.join(parts)
    # Adjust for any leftover partial quanta
    if leftover == 1:
        return encoded[:-6] + '======'
    elif leftover == 2:
        return encoded[:-4] + '===='
    elif leftover == 3:
        return encoded[:-3] + '==='
    elif leftover == 4:
        return encoded[:-1] + '='
    return encoded


def b32decode(s, casefold=False, map01=None):
    """Decode a Base32 encoded string.

    s is the string to decode.  Optional casefold is a flag specifying whether
    a lowercase alphabet is acceptable as input.  For security purposes, the
    default is False.

    RFC 3548 allows for optional mapping of the digit 0 (zero) to the letter O
    (oh), and for optional mapping of the digit 1 (one) to either the letter I
    (eye) or letter L (el).  The optional argument map01, when not None,
    specifies which letter the digit 1 should be mapped to (when map01 is not
    None, the digit 0 is always mapped to the letter O).  For security
    purposes the default is None, so that 0 and 1 are not allowed in the
    input.

    The decoded string is returned.  A TypeError is raised if s is
    incorrectly padded or if there are non-alphabet characters present in the
    string.
    """
    quanta, leftover = divmod(len(s), 8)
    if leftover:
        raise TypeError('Incorrect padding')
    # Handle section 2.4 zero and one mapping.  The flag map01 will be either
    # False, or the character to map the digit 1 (one) to.  It should be
    # either L (el) or I (eye).
    if map01:
        s = _translate(s, {'0': 'O', '1': map01})
    if casefold:
        s = s.upper()
    # Strip off pad characters from the right.  We need to count the pad
    # characters because this will tell us how many null bytes to remove from
    # the end of the decoded string.
    padchars = 0
    mo = re.search('(?P<pad>[=]*)$', s)
    if mo:
        padchars = len(mo.group('pad'))
        if padchars > 0:
            s = s[:-padchars]
    # Now decode the full quanta
    parts = []
    acc = 0
    shift = 35
    for c in s:
        val = _b32rev.get(c)
        if val is None:
            raise TypeError('Non-base32 digit found')
        acc += _b32rev[c] << shift
        shift -= 5
        if shift < 0:
            parts.append(binascii.unhexlify('%010x' % acc))
            acc = 0
            shift = 35
    # Process the last, partial quanta
    last = binascii.unhexlify('%010x' % acc)
    if padchars == 0:
        last = ''                       # No characters
    elif padchars == 1:
        last = last[:-1]
    elif padchars == 3:
        last = last[:-2]
    elif padchars == 4:
        last = last[:-3]
    elif padchars == 6:
        last = last[:-4]
    else:
        raise TypeError('Incorrect padding')
    parts.append(last)
    return EMPTYSTRING.join(parts)



# RFC 3548, Base 16 Alphabet specifies uppercase, but hexlify() returns
# lowercase.  The RFC also recommends against accepting input case
# insensitively.
def b16encode(s):
    """Encode a string using Base16.

    s is the string to encode.  The encoded string is returned.
    """
    return binascii.hexlify(s).upper()


def b16decode(s, casefold=False):
    """Decode a Base16 encoded string.

    s is the string to decode.  Optional casefold is a flag specifying whether
    a lowercase alphabet is acceptable as input.  For security purposes, the
    default is False.

    The decoded string is returned.  A TypeError is raised if s is
    incorrectly padded or if there are non-alphabet characters present in the
    string.
    """
    if casefold:
        s = s.upper()
    if re.search('[^0-9A-F]', s):
        raise TypeError('Non-base16 digit found')
    return binascii.unhexlify(s)



# Legacy interface.  This code could be cleaned up since I don't believe
# binascii has any line length limitations.  It just doesn't seem worth it
# though.

MAXLINESIZE = 76 # Excluding the CRLF
MAXBINSIZE = (MAXLINESIZE//4)*3

def encode(input, output):
    """Encode a file."""
    while True:
        s = input.read(MAXBINSIZE)
        if not s:
            break
        while len(s) < MAXBINSIZE:
            ns = input.read(MAXBINSIZE-len(s))
            if not ns:
                break
            s += ns
        line = binascii.b2a_base64(s)
        output.write(line)


def decode(input, output):
    """Decode a file."""
    while True:
        line = input.readline()
        if not line:
            break
        s = binascii.a2b_base64(line)
        output.write(s)


def encodestring(s):
    """Encode a string into multiple lines of base-64 data."""
    pieces = []
    for i in range(0, len(s), MAXBINSIZE):
        chunk = s[i : i + MAXBINSIZE]
        pieces.append(binascii.b2a_base64(chunk))
    return "".join(pieces)


def decodestring(s):
    """Decode a string."""
    return binascii.a2b_base64(s)



# Usable as a script...
def test():
    """Small test program"""
    import sys, getopt
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'deut')
    except getopt.error, msg:
        sys.stdout = sys.stderr
        print msg
        print """usage: %s [-d|-e|-u|-t] [file|-]
        -d, -u: decode
        -e: encode (default)
        -t: encode and decode string 'Aladdin:open sesame'"""%sys.argv[0]
        sys.exit(2)
    func = encode
    for o, a in opts:
        if o == '-e': func = encode
        if o == '-d': func = decode
        if o == '-u': func = decode
        if o == '-t': test1(); return
    if args and args[0] != '-':
        with open(args[0], 'rb') as f:
            func(f, sys.stdout)
    else:
        func(sys.stdin, sys.stdout)


def test1():
    s0 = "Aladdin:open sesame"
    s1 = encodestring(s0)
    s2 = decodestring(s1)
    print s0, repr(s1), s2


if __name__ == '__main__':
    test()
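The module-level helpers compose exactly as the docstrings describe; the url-safe pair simply swaps '+/' for '-_' via _translate. A quick Python 2 round-trip:

    import base64

    raw = 'Aladdin:open sesame'
    enc = base64.b64encode(raw)
    print enc                                 # QWxhZGRpbjpvcGVuIHNlc2FtZQ==
    print base64.b64decode(enc) == raw        # True
    print base64.urlsafe_b64encode('\xfb\xff')  # '-_8=' where standard gives '+/8='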
@@ -1,455 +0,0 @@
#----------------------------------------------------------------------
|
||||
# Copyright (c) 1999-2001, Digital Creations, Fredericksburg, VA, USA
|
||||
# and Andrew Kuchling. All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# o Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions, and the disclaimer that follows.
|
||||
#
|
||||
# o Redistributions in binary form must reproduce the above copyright
|
||||
# notice, this list of conditions, and the following disclaimer in
|
||||
# the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
#
|
||||
# o Neither the name of Digital Creations nor the names of its
|
||||
# contributors may be used to endorse or promote products derived
|
||||
# from this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY DIGITAL CREATIONS AND CONTRIBUTORS *AS
|
||||
# IS* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
|
||||
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
|
||||
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL DIGITAL
|
||||
# CREATIONS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||||
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
|
||||
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
|
||||
# DAMAGE.
|
||||
#----------------------------------------------------------------------
|
||||
|
||||
|
||||
"""Support for Berkeley DB 4.3 through 5.3 with a simple interface.
|
||||
|
||||
For the full featured object oriented interface use the bsddb.db module
|
||||
instead. It mirrors the Oracle Berkeley DB C API.
|
||||
"""
|
||||
|
||||
import sys
|
||||
absolute_import = (sys.version_info[0] >= 3)
|
||||
|
||||
if (sys.version_info >= (2, 6)) and (sys.version_info < (3, 0)) :
|
||||
import warnings
|
||||
if sys.py3kwarning and (__name__ != 'bsddb3') :
|
||||
warnings.warnpy3k("in 3.x, the bsddb module has been removed; "
|
||||
"please use the pybsddb project instead",
|
||||
DeprecationWarning, 2)
|
||||
warnings.filterwarnings("ignore", ".*CObject.*", DeprecationWarning,
|
||||
"bsddb.__init__")
|
||||
|
||||
try:
|
||||
if __name__ == 'bsddb3':
|
||||
# import _pybsddb binary as it should be the more recent version from
|
||||
# a standalone pybsddb addon package than the version included with
|
||||
# python as bsddb._bsddb.
|
||||
if absolute_import :
|
||||
# Because this syntaxis is not valid before Python 2.5
|
||||
exec("from . import _pybsddb")
|
||||
else :
|
||||
import _pybsddb
|
||||
_bsddb = _pybsddb
|
||||
from bsddb3.dbutils import DeadlockWrap as _DeadlockWrap
|
||||
else:
|
||||
import _bsddb
|
||||
from bsddb.dbutils import DeadlockWrap as _DeadlockWrap
|
||||
except ImportError:
|
||||
# Remove ourselves from sys.modules
|
||||
import sys
|
||||
del sys.modules[__name__]
|
||||
raise
|
||||
|
||||
# bsddb3 calls it db, but provide _db for backwards compatibility
|
||||
db = _db = _bsddb
|
||||
__version__ = db.__version__
|
||||
|
||||
error = db.DBError # So bsddb.error will mean something...
|
||||
|
||||
#----------------------------------------------------------------------
|
||||
|
||||
import sys, os
|
||||
|
||||
from weakref import ref
|
||||
|
||||
if sys.version_info < (2, 6) :
|
||||
import UserDict
|
||||
MutableMapping = UserDict.DictMixin
|
||||
else :
|
||||
import collections
|
||||
MutableMapping = collections.MutableMapping
|
||||
|
||||
class _iter_mixin(MutableMapping):
|
||||
def _make_iter_cursor(self):
|
||||
cur = _DeadlockWrap(self.db.cursor)
|
||||
key = id(cur)
|
||||
self._cursor_refs[key] = ref(cur, self._gen_cref_cleaner(key))
|
||||
return cur
|
||||
|
||||
def _gen_cref_cleaner(self, key):
|
||||
# use generate the function for the weakref callback here
|
||||
# to ensure that we do not hold a strict reference to cur
|
||||
# in the callback.
|
||||
return lambda ref: self._cursor_refs.pop(key, None)
|
||||
|
||||
def __iter__(self):
|
||||
self._kill_iteration = False
|
||||
self._in_iter += 1
|
||||
try:
|
||||
try:
|
||||
cur = self._make_iter_cursor()
|
||||
|
||||
# FIXME-20031102-greg: race condition. cursor could
|
||||
# be closed by another thread before this call.
|
||||
|
||||
# since we're only returning keys, we call the cursor
|
||||
# methods with flags=0, dlen=0, dofs=0
|
||||
key = _DeadlockWrap(cur.first, 0,0,0)[0]
|
||||
yield key
|
||||
|
||||
next = getattr(cur, "next")
|
||||
while 1:
|
||||
try:
|
||||
key = _DeadlockWrap(next, 0,0,0)[0]
|
||||
yield key
|
||||
except _bsddb.DBCursorClosedError:
|
||||
if self._kill_iteration:
|
||||
raise RuntimeError('Database changed size '
|
||||
'during iteration.')
|
||||
cur = self._make_iter_cursor()
|
||||
# FIXME-20031101-greg: race condition. cursor could
|
||||
# be closed by another thread before this call.
|
||||
_DeadlockWrap(cur.set, key,0,0,0)
|
||||
next = getattr(cur, "next")
|
||||
except _bsddb.DBNotFoundError:
|
||||
pass
|
||||
except _bsddb.DBCursorClosedError:
|
||||
# the database was modified during iteration. abort.
|
||||
pass
|
||||
# When Python 2.4 not supported in bsddb3, we can change this to "finally"
|
||||
except :
|
||||
self._in_iter -= 1
|
||||
raise
|
||||
|
||||
self._in_iter -= 1
|
||||
|
||||
def iteritems(self):
|
||||
if not self.db:
|
||||
return
|
||||
self._kill_iteration = False
|
||||
self._in_iter += 1
|
||||
try:
|
||||
try:
|
||||
cur = self._make_iter_cursor()
|
||||
|
||||
# FIXME-20031102-greg: race condition. cursor could
|
||||
# be closed by another thread before this call.
|
||||
|
||||
kv = _DeadlockWrap(cur.first)
|
||||
key = kv[0]
|
||||
yield kv
|
||||
|
||||
next = getattr(cur, "next")
|
||||
while 1:
|
||||
try:
|
||||
kv = _DeadlockWrap(next)
|
||||
key = kv[0]
|
||||
yield kv
|
||||
except _bsddb.DBCursorClosedError:
|
||||
if self._kill_iteration:
|
||||
raise RuntimeError('Database changed size '
|
||||
'during iteration.')
|
||||
cur = self._make_iter_cursor()
|
||||
# FIXME-20031101-greg: race condition. cursor could
|
||||
# be closed by another thread before this call.
|
||||
_DeadlockWrap(cur.set, key,0,0,0)
|
||||
next = getattr(cur, "next")
|
||||
except _bsddb.DBNotFoundError:
|
||||
pass
|
||||
except _bsddb.DBCursorClosedError:
|
||||
# the database was modified during iteration. abort.
|
||||
pass
|
||||
# When Python 2.4 not supported in bsddb3, we can change this to "finally"
|
||||
except :
|
||||
self._in_iter -= 1
|
||||
raise
|
||||
|
||||
self._in_iter -= 1
|
||||
|
||||
|
||||
class _DBWithCursor(_iter_mixin):
|
||||
"""
|
||||
A simple wrapper around DB that makes it look like the bsddbobject in
|
||||
the old module. It uses a cursor as needed to provide DB traversal.
|
||||
"""
|
||||
def __init__(self, db):
|
||||
self.db = db
|
||||
self.db.set_get_returns_none(0)
|
||||
|
||||
# FIXME-20031101-greg: I believe there is still the potential
|
||||
# for deadlocks in a multithreaded environment if someone
|
||||
# attempts to use the any of the cursor interfaces in one
|
||||
# thread while doing a put or delete in another thread. The
|
||||
# reason is that _checkCursor and _closeCursors are not atomic
|
||||
# operations. Doing our own locking around self.dbc,
|
||||
# self.saved_dbc_key and self._cursor_refs could prevent this.
|
||||
# TODO: A test case demonstrating the problem needs to be written.
|
||||
|
||||
# self.dbc is a DBCursor object used to implement the
|
||||
# first/next/previous/last/set_location methods.
|
||||
self.dbc = None
|
||||
self.saved_dbc_key = None
|
||||
|
||||
# a collection of all DBCursor objects currently allocated
|
||||
# by the _iter_mixin interface.
|
||||
self._cursor_refs = {}
|
||||
self._in_iter = 0
|
||||
self._kill_iteration = False
|
||||
|
||||
def __del__(self):
|
||||
self.close()
|
||||
|
||||
def _checkCursor(self):
|
||||
if self.dbc is None:
|
||||
self.dbc = _DeadlockWrap(self.db.cursor)
|
||||
if self.saved_dbc_key is not None:
|
||||
_DeadlockWrap(self.dbc.set, self.saved_dbc_key)
|
||||
self.saved_dbc_key = None
|
||||
|
||||
# This method is needed for all non-cursor DB calls to avoid
|
||||
# Berkeley DB deadlocks (due to being opened with DB_INIT_LOCK
|
||||
# and DB_THREAD to be thread safe) when intermixing database
|
||||
# operations that use the cursor internally with those that don't.
|
||||
def _closeCursors(self, save=1):
|
||||
if self.dbc:
|
||||
c = self.dbc
|
||||
self.dbc = None
|
||||
if save:
|
||||
try:
|
||||
self.saved_dbc_key = _DeadlockWrap(c.current, 0,0,0)[0]
|
||||
except db.DBError:
|
||||
pass
|
||||
_DeadlockWrap(c.close)
|
||||
del c
|
||||
for cref in self._cursor_refs.values():
|
||||
c = cref()
|
||||
if c is not None:
|
||||
_DeadlockWrap(c.close)
|
||||
|
||||
def _checkOpen(self):
|
||||
if self.db is None:
|
||||
raise error, "BSDDB object has already been closed"
|
||||
|
||||
    def isOpen(self):
        return self.db is not None

    def __len__(self):
        self._checkOpen()
        return _DeadlockWrap(lambda: len(self.db))  # len(self.db)

    if sys.version_info >= (2, 6) :
        def __repr__(self) :
            if self.isOpen() :
                return repr(dict(_DeadlockWrap(self.db.items)))
            return repr(dict())

    def __getitem__(self, key):
        self._checkOpen()
        return _DeadlockWrap(lambda: self.db[key])  # self.db[key]

    def __setitem__(self, key, value):
        self._checkOpen()
        self._closeCursors()
        if self._in_iter and key not in self:
            self._kill_iteration = True
        def wrapF():
            self.db[key] = value
        _DeadlockWrap(wrapF)  # self.db[key] = value

    def __delitem__(self, key):
        self._checkOpen()
        self._closeCursors()
        if self._in_iter and key in self:
            self._kill_iteration = True
        def wrapF():
            del self.db[key]
        _DeadlockWrap(wrapF)  # del self.db[key]

    def close(self):
        self._closeCursors(save=0)
        if self.dbc is not None:
            _DeadlockWrap(self.dbc.close)
        v = 0
        if self.db is not None:
            v = _DeadlockWrap(self.db.close)
        self.dbc = None
        self.db = None
        return v

    def keys(self):
        self._checkOpen()
        return _DeadlockWrap(self.db.keys)

    def has_key(self, key):
        self._checkOpen()
        return _DeadlockWrap(self.db.has_key, key)

    def set_location(self, key):
        self._checkOpen()
        self._checkCursor()
        return _DeadlockWrap(self.dbc.set_range, key)

    def next(self):  # Renamed by "2to3"
        self._checkOpen()
        self._checkCursor()
        rv = _DeadlockWrap(getattr(self.dbc, "next"))
        return rv

    if sys.version_info[0] >= 3 :  # For "2to3" conversion
        next = __next__

    def previous(self):
        self._checkOpen()
        self._checkCursor()
        rv = _DeadlockWrap(self.dbc.prev)
        return rv

    def first(self):
        self._checkOpen()
        # fix 1725856: don't needlessly try to restore our cursor position
        self.saved_dbc_key = None
        self._checkCursor()
        rv = _DeadlockWrap(self.dbc.first)
        return rv

    def last(self):
        self._checkOpen()
        # fix 1725856: don't needlessly try to restore our cursor position
        self.saved_dbc_key = None
        self._checkCursor()
        rv = _DeadlockWrap(self.dbc.last)
        return rv

    def sync(self):
        self._checkOpen()
        return _DeadlockWrap(self.db.sync)


#----------------------------------------------------------------------
# Compatibility object factory functions

def hashopen(file, flag='c', mode=0666, pgsize=None, ffactor=None, nelem=None,
             cachesize=None, lorder=None, hflags=0):

    flags = _checkflag(flag, file)
    e = _openDBEnv(cachesize)
    d = db.DB(e)
    d.set_flags(hflags)
    if pgsize is not None:   d.set_pagesize(pgsize)
    if lorder is not None:   d.set_lorder(lorder)
    if ffactor is not None:  d.set_h_ffactor(ffactor)
    if nelem is not None:    d.set_h_nelem(nelem)
    d.open(file, db.DB_HASH, flags, mode)
    return _DBWithCursor(d)

#----------------------------------------------------------------------

def btopen(file, flag='c', mode=0666,
           btflags=0, cachesize=None, maxkeypage=None, minkeypage=None,
           pgsize=None, lorder=None):

    flags = _checkflag(flag, file)
    e = _openDBEnv(cachesize)
    d = db.DB(e)
    if pgsize is not None: d.set_pagesize(pgsize)
    if lorder is not None: d.set_lorder(lorder)
    d.set_flags(btflags)
    if minkeypage is not None: d.set_bt_minkey(minkeypage)
    if maxkeypage is not None: d.set_bt_maxkey(maxkeypage)
    d.open(file, db.DB_BTREE, flags, mode)
    return _DBWithCursor(d)

#----------------------------------------------------------------------


def rnopen(file, flag='c', mode=0666,
           rnflags=0, cachesize=None, pgsize=None, lorder=None,
           rlen=None, delim=None, source=None, pad=None):

    flags = _checkflag(flag, file)
    e = _openDBEnv(cachesize)
    d = db.DB(e)
    if pgsize is not None: d.set_pagesize(pgsize)
    if lorder is not None: d.set_lorder(lorder)
    d.set_flags(rnflags)
    if delim is not None: d.set_re_delim(delim)
    if rlen is not None: d.set_re_len(rlen)
    if source is not None: d.set_re_source(source)
    if pad is not None: d.set_re_pad(pad)
    d.open(file, db.DB_RECNO, flags, mode)
    return _DBWithCursor(d)

#----------------------------------------------------------------------
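
# Illustrative usage sketch (added commentary, not part of the original
# module).  Each factory above returns a _DBWithCursor that behaves like
# a dictionary; the path below is a hypothetical example:
#
#     from bsddb import hashopen
#
#     d = hashopen('/tmp/example.db', 'c')    # 'c' = create if missing
#     d['spam'] = 'eggs'                      # keys and values are strings
#     print d.has_key('spam')                 # -> true
#     d.close()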

def _openDBEnv(cachesize):
    e = db.DBEnv()
    if cachesize is not None:
        if cachesize >= 20480:
            e.set_cachesize(0, cachesize)
        else:
            raise error, "cachesize must be >= 20480"
    e.set_lk_detect(db.DB_LOCK_DEFAULT)
    e.open('.', db.DB_PRIVATE | db.DB_CREATE | db.DB_THREAD | db.DB_INIT_LOCK | db.DB_INIT_MPOOL)
    return e

def _checkflag(flag, file):
    if flag == 'r':
        flags = db.DB_RDONLY
    elif flag == 'rw':
        flags = 0
    elif flag == 'w':
        flags = db.DB_CREATE
    elif flag == 'c':
        flags = db.DB_CREATE
    elif flag == 'n':
        flags = db.DB_CREATE
        #flags = db.DB_CREATE | db.DB_TRUNCATE
        # we used db.DB_TRUNCATE flag for this before but Berkeley DB
        # 4.2.52 changed to disallow truncate with txn environments.
        if file is not None and os.path.isfile(file):
            os.unlink(file)
    else:
        raise error, "flags should be one of 'r', 'w', 'c' or 'n'"
    return flags | db.DB_THREAD

#----------------------------------------------------------------------


# This is a silly little hack that allows apps to continue to use the
# DB_THREAD flag even on systems without threads without freaking out
# Berkeley DB.
#
# This assumes that if Python was built with thread support then
# Berkeley DB was too.

try:
    # 2to3 automatically changes "import thread" to "import _thread"
    import thread as T
    del T

except ImportError:
    db.DB_THREAD = 0

#----------------------------------------------------------------------
@ -1,60 +0,0 @@
#----------------------------------------------------------------------
# Copyright (c) 1999-2001, Digital Creations, Fredericksburg, VA, USA
# and Andrew Kuchling. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#   o Redistributions of source code must retain the above copyright
#     notice, this list of conditions, and the disclaimer that follows.
#
#   o Redistributions in binary form must reproduce the above copyright
#     notice, this list of conditions, and the following disclaimer in
#     the documentation and/or other materials provided with the
#     distribution.
#
#   o Neither the name of Digital Creations nor the names of its
#     contributors may be used to endorse or promote products derived
#     from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY DIGITAL CREATIONS AND CONTRIBUTORS *AS
# IS* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL DIGITAL
# CREATIONS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
#----------------------------------------------------------------------


# This module is just a placeholder for possible future expansion, in
# case we ever want to augment the stuff in _db in any way.  For now
# it simply imports everything from _db.

import sys
absolute_import = (sys.version_info[0] >= 3)

if not absolute_import :
    if __name__.startswith('bsddb3.') :
        # import _pybsddb binary as it should be the more recent version from
        # a standalone pybsddb addon package than the version included with
        # python as bsddb._bsddb.
        from _pybsddb import *
        from _pybsddb import __version__
    else:
        from _bsddb import *
        from _bsddb import __version__
else :
    # Because this syntax is not valid before Python 2.5
    if __name__.startswith('bsddb3.') :
        exec("from ._pybsddb import *")
        exec("from ._pybsddb import __version__")
    else :
        exec("from ._bsddb import *")
        exec("from ._bsddb import __version__")
@ -1,266 +0,0 @@
#-------------------------------------------------------------------------
# This file contains real Python object wrappers for DB and DBEnv
# C "objects" that can be usefully subclassed.  The previous SWIG
# based interface allowed this thanks to SWIG's shadow classes.
#   --  Gregory P. Smith
#-------------------------------------------------------------------------
#
# (C) Copyright 2001  Autonomous Zone Industries
#
# License:  This is free software.  You may use this software for any
#           purpose including modification/redistribution, so long as
#           this header remains intact and that you do not claim any
#           rights of ownership or authorship of this software.  This
#           software has been tested, but no warranty is expressed or
#           implied.
#

#
# TODO it would be *really nice* to have an automatic shadow class populator
# so that new methods don't need to be added here manually after being
# added to _bsddb.c.
#

import sys
absolute_import = (sys.version_info[0] >= 3)
if absolute_import :
    # Because this syntax is not valid before Python 2.5
    exec("from . import db")
else :
    import db

if sys.version_info < (2, 6) :
    from UserDict import DictMixin as MutableMapping
else :
    import collections
    MutableMapping = collections.MutableMapping

class DBEnv:
    def __init__(self, *args, **kwargs):
        self._cobj = db.DBEnv(*args, **kwargs)

    def close(self, *args, **kwargs):
        return self._cobj.close(*args, **kwargs)
    def open(self, *args, **kwargs):
        return self._cobj.open(*args, **kwargs)
    def remove(self, *args, **kwargs):
        return self._cobj.remove(*args, **kwargs)
    def set_shm_key(self, *args, **kwargs):
        return self._cobj.set_shm_key(*args, **kwargs)
    def set_cachesize(self, *args, **kwargs):
        return self._cobj.set_cachesize(*args, **kwargs)
    def set_data_dir(self, *args, **kwargs):
        return self._cobj.set_data_dir(*args, **kwargs)
    def set_flags(self, *args, **kwargs):
        return self._cobj.set_flags(*args, **kwargs)
    def set_lg_bsize(self, *args, **kwargs):
        return self._cobj.set_lg_bsize(*args, **kwargs)
    def set_lg_dir(self, *args, **kwargs):
        return self._cobj.set_lg_dir(*args, **kwargs)
    def set_lg_max(self, *args, **kwargs):
        return self._cobj.set_lg_max(*args, **kwargs)
    def set_lk_detect(self, *args, **kwargs):
        return self._cobj.set_lk_detect(*args, **kwargs)
    if db.version() < (4,5):
        def set_lk_max(self, *args, **kwargs):
            return self._cobj.set_lk_max(*args, **kwargs)
    def set_lk_max_locks(self, *args, **kwargs):
        return self._cobj.set_lk_max_locks(*args, **kwargs)
    def set_lk_max_lockers(self, *args, **kwargs):
        return self._cobj.set_lk_max_lockers(*args, **kwargs)
    def set_lk_max_objects(self, *args, **kwargs):
        return self._cobj.set_lk_max_objects(*args, **kwargs)
    def set_mp_mmapsize(self, *args, **kwargs):
        return self._cobj.set_mp_mmapsize(*args, **kwargs)
    def set_timeout(self, *args, **kwargs):
        return self._cobj.set_timeout(*args, **kwargs)
    def set_tmp_dir(self, *args, **kwargs):
        return self._cobj.set_tmp_dir(*args, **kwargs)
    def txn_begin(self, *args, **kwargs):
        return self._cobj.txn_begin(*args, **kwargs)
    def txn_checkpoint(self, *args, **kwargs):
        return self._cobj.txn_checkpoint(*args, **kwargs)
    def txn_stat(self, *args, **kwargs):
        return self._cobj.txn_stat(*args, **kwargs)
    def set_tx_max(self, *args, **kwargs):
        return self._cobj.set_tx_max(*args, **kwargs)
    def set_tx_timestamp(self, *args, **kwargs):
        return self._cobj.set_tx_timestamp(*args, **kwargs)
    def lock_detect(self, *args, **kwargs):
        return self._cobj.lock_detect(*args, **kwargs)
    def lock_get(self, *args, **kwargs):
        return self._cobj.lock_get(*args, **kwargs)
    def lock_id(self, *args, **kwargs):
        return self._cobj.lock_id(*args, **kwargs)
    def lock_put(self, *args, **kwargs):
        return self._cobj.lock_put(*args, **kwargs)
    def lock_stat(self, *args, **kwargs):
        return self._cobj.lock_stat(*args, **kwargs)
    def log_archive(self, *args, **kwargs):
        return self._cobj.log_archive(*args, **kwargs)

    def set_get_returns_none(self, *args, **kwargs):
        return self._cobj.set_get_returns_none(*args, **kwargs)

    def log_stat(self, *args, **kwargs):
        return self._cobj.log_stat(*args, **kwargs)

    def dbremove(self, *args, **kwargs):
        return self._cobj.dbremove(*args, **kwargs)
    def dbrename(self, *args, **kwargs):
        return self._cobj.dbrename(*args, **kwargs)
    def set_encrypt(self, *args, **kwargs):
        return self._cobj.set_encrypt(*args, **kwargs)

    if db.version() >= (4,4):
        def fileid_reset(self, *args, **kwargs):
            return self._cobj.fileid_reset(*args, **kwargs)

        def lsn_reset(self, *args, **kwargs):
            return self._cobj.lsn_reset(*args, **kwargs)


class DB(MutableMapping):
    def __init__(self, dbenv, *args, **kwargs):
        # give it the proper DBEnv C object that it's expecting
        self._cobj = db.DB(*((dbenv._cobj,) + args), **kwargs)

    # TODO are there other dict methods that need to be overridden?
    def __len__(self):
        return len(self._cobj)
    def __getitem__(self, arg):
        return self._cobj[arg]
    def __setitem__(self, key, value):
        self._cobj[key] = value
    def __delitem__(self, arg):
        del self._cobj[arg]

    if sys.version_info >= (2, 6) :
        def __iter__(self) :
            return self._cobj.__iter__()

    def append(self, *args, **kwargs):
        return self._cobj.append(*args, **kwargs)
    def associate(self, *args, **kwargs):
        return self._cobj.associate(*args, **kwargs)
    def close(self, *args, **kwargs):
        return self._cobj.close(*args, **kwargs)
    def consume(self, *args, **kwargs):
        return self._cobj.consume(*args, **kwargs)
    def consume_wait(self, *args, **kwargs):
        return self._cobj.consume_wait(*args, **kwargs)
    def cursor(self, *args, **kwargs):
        return self._cobj.cursor(*args, **kwargs)
    def delete(self, *args, **kwargs):
        return self._cobj.delete(*args, **kwargs)
    def fd(self, *args, **kwargs):
        return self._cobj.fd(*args, **kwargs)
    def get(self, *args, **kwargs):
        return self._cobj.get(*args, **kwargs)
    def pget(self, *args, **kwargs):
        return self._cobj.pget(*args, **kwargs)
    def get_both(self, *args, **kwargs):
        return self._cobj.get_both(*args, **kwargs)
    def get_byteswapped(self, *args, **kwargs):
        return self._cobj.get_byteswapped(*args, **kwargs)
    def get_size(self, *args, **kwargs):
        return self._cobj.get_size(*args, **kwargs)
    def get_type(self, *args, **kwargs):
        return self._cobj.get_type(*args, **kwargs)
    def join(self, *args, **kwargs):
        return self._cobj.join(*args, **kwargs)
    def key_range(self, *args, **kwargs):
        return self._cobj.key_range(*args, **kwargs)
    def has_key(self, *args, **kwargs):
        return self._cobj.has_key(*args, **kwargs)
    def items(self, *args, **kwargs):
        return self._cobj.items(*args, **kwargs)
    def keys(self, *args, **kwargs):
        return self._cobj.keys(*args, **kwargs)
    def open(self, *args, **kwargs):
        return self._cobj.open(*args, **kwargs)
    def put(self, *args, **kwargs):
        return self._cobj.put(*args, **kwargs)
    def remove(self, *args, **kwargs):
        return self._cobj.remove(*args, **kwargs)
    def rename(self, *args, **kwargs):
        return self._cobj.rename(*args, **kwargs)
    def set_bt_minkey(self, *args, **kwargs):
        return self._cobj.set_bt_minkey(*args, **kwargs)
    def set_bt_compare(self, *args, **kwargs):
        return self._cobj.set_bt_compare(*args, **kwargs)
    def set_cachesize(self, *args, **kwargs):
        return self._cobj.set_cachesize(*args, **kwargs)
    def set_dup_compare(self, *args, **kwargs) :
        return self._cobj.set_dup_compare(*args, **kwargs)
    def set_flags(self, *args, **kwargs):
        return self._cobj.set_flags(*args, **kwargs)
    def set_h_ffactor(self, *args, **kwargs):
        return self._cobj.set_h_ffactor(*args, **kwargs)
    def set_h_nelem(self, *args, **kwargs):
        return self._cobj.set_h_nelem(*args, **kwargs)
    def set_lorder(self, *args, **kwargs):
        return self._cobj.set_lorder(*args, **kwargs)
    def set_pagesize(self, *args, **kwargs):
        return self._cobj.set_pagesize(*args, **kwargs)
    def set_re_delim(self, *args, **kwargs):
        return self._cobj.set_re_delim(*args, **kwargs)
    def set_re_len(self, *args, **kwargs):
        return self._cobj.set_re_len(*args, **kwargs)
    def set_re_pad(self, *args, **kwargs):
        return self._cobj.set_re_pad(*args, **kwargs)
    def set_re_source(self, *args, **kwargs):
        return self._cobj.set_re_source(*args, **kwargs)
    def set_q_extentsize(self, *args, **kwargs):
        return self._cobj.set_q_extentsize(*args, **kwargs)
    def stat(self, *args, **kwargs):
        return self._cobj.stat(*args, **kwargs)
    def sync(self, *args, **kwargs):
        return self._cobj.sync(*args, **kwargs)
    def type(self, *args, **kwargs):
        return self._cobj.type(*args, **kwargs)
    def upgrade(self, *args, **kwargs):
        return self._cobj.upgrade(*args, **kwargs)
    def values(self, *args, **kwargs):
        return self._cobj.values(*args, **kwargs)
    def verify(self, *args, **kwargs):
        return self._cobj.verify(*args, **kwargs)
    def set_get_returns_none(self, *args, **kwargs):
        return self._cobj.set_get_returns_none(*args, **kwargs)

    def set_encrypt(self, *args, **kwargs):
        return self._cobj.set_encrypt(*args, **kwargs)


class DBSequence:
    def __init__(self, *args, **kwargs):
        self._cobj = db.DBSequence(*args, **kwargs)

    def close(self, *args, **kwargs):
        return self._cobj.close(*args, **kwargs)
    def get(self, *args, **kwargs):
        return self._cobj.get(*args, **kwargs)
    def get_dbp(self, *args, **kwargs):
        return self._cobj.get_dbp(*args, **kwargs)
    def get_key(self, *args, **kwargs):
        return self._cobj.get_key(*args, **kwargs)
    def init_value(self, *args, **kwargs):
        return self._cobj.init_value(*args, **kwargs)
    def open(self, *args, **kwargs):
        return self._cobj.open(*args, **kwargs)
    def remove(self, *args, **kwargs):
        return self._cobj.remove(*args, **kwargs)
    def stat(self, *args, **kwargs):
        return self._cobj.stat(*args, **kwargs)
    def set_cachesize(self, *args, **kwargs):
        return self._cobj.set_cachesize(*args, **kwargs)
    def set_flags(self, *args, **kwargs):
        return self._cobj.set_flags(*args, **kwargs)
    def set_range(self, *args, **kwargs):
        return self._cobj.set_range(*args, **kwargs)
    def get_cachesize(self, *args, **kwargs):
        return self._cobj.get_cachesize(*args, **kwargs)
    def get_flags(self, *args, **kwargs):
        return self._cobj.get_flags(*args, **kwargs)
    def get_range(self, *args, **kwargs):
        return self._cobj.get_range(*args, **kwargs)
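
# Illustrative sketch (added commentary, not part of the original file):
# unlike the C types in _bsddb, these shadow classes can be subclassed,
# which is the whole point of the module.  A hypothetical logging wrapper:
#
#     import dbobj
#
#     class LoggingDB(dbobj.DB):
#         def put(self, key, value, *args, **kwargs):
#             print 'put %r' % (key,)
#             return dbobj.DB.put(self, key, value, *args, **kwargs)
#
#     dbenv = dbobj.DBEnv()
#     dbenv.open('/tmp/envdir', dbobj.db.DB_CREATE | dbobj.db.DB_INIT_MPOOL)
#     d = LoggingDB(dbenv)
#     d.open('log.db', dbobj.db.DB_HASH, dbobj.db.DB_CREATE)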
@ -1,190 +0,0 @@

"""
File-like objects that read from or write to a bsddb record.

This implements (nearly) all stdio methods.

f = DBRecIO(db, key, txn=None)
f.close()           # explicitly release resources held
flag = f.isatty()   # always false
pos = f.tell()      # get current position
f.seek(pos)         # set current position
f.seek(pos, mode)   # mode 0: absolute; 1: relative; 2: relative to EOF
buf = f.read()      # read until EOF
buf = f.read(n)     # read up to n bytes
f.truncate([size])  # truncate file to at most size (default: current pos)
f.write(buf)        # write at current position
f.writelines(list)  # for line in list: f.write(line)

Notes:
- fileno() is left unimplemented so that code which uses it triggers
  an exception early.
- There's a simple test set (see end of this file) - not yet updated
  for DBRecIO.
- readline() is not implemented yet.


From:
    Itamar Shtull-Trauring <itamar@maxnm.com>
"""

import errno
import string

class DBRecIO:
    def __init__(self, db, key, txn=None):
        self.db = db
        self.key = key
        self.txn = txn
        # the original initialized this to None, which broke seek()/read();
        # derive the current record length up front instead
        data = db.get(key, txn=txn)
        if data is None:
            data = ''
        self.len = len(data)
        self.pos = 0
        self.closed = 0
        self.softspace = 0

    def close(self):
        if not self.closed:
            self.closed = 1
            del self.db, self.txn

    def isatty(self):
        if self.closed:
            raise ValueError, "I/O operation on closed file"
        return 0

    def seek(self, pos, mode = 0):
        if self.closed:
            raise ValueError, "I/O operation on closed file"
        if mode == 1:
            pos = pos + self.pos
        elif mode == 2:
            pos = pos + self.len
        self.pos = max(0, pos)

    def tell(self):
        if self.closed:
            raise ValueError, "I/O operation on closed file"
        return self.pos

    def read(self, n = -1):
        if self.closed:
            raise ValueError, "I/O operation on closed file"
        if n < 0:
            newpos = self.len
        else:
            newpos = min(self.pos+n, self.len)

        dlen = newpos - self.pos

        r = self.db.get(self.key, txn=self.txn, dlen=dlen, doff=self.pos)
        self.pos = newpos
        return r

    __fixme = """
    def readline(self, length=None):
        if self.closed:
            raise ValueError, "I/O operation on closed file"
        if self.buflist:
            self.buf = self.buf + string.joinfields(self.buflist, '')
            self.buflist = []
        i = string.find(self.buf, '\n', self.pos)
        if i < 0:
            newpos = self.len
        else:
            newpos = i+1
        if length is not None:
            if self.pos + length < newpos:
                newpos = self.pos + length
        r = self.buf[self.pos:newpos]
        self.pos = newpos
        return r

    def readlines(self, sizehint = 0):
        total = 0
        lines = []
        line = self.readline()
        while line:
            lines.append(line)
            total += len(line)
            if 0 < sizehint <= total:
                break
            line = self.readline()
        return lines
    """

    def truncate(self, size=None):
        if self.closed:
            raise ValueError, "I/O operation on closed file"
        if size is None:
            size = self.pos
        elif size < 0:
            raise IOError(errno.EINVAL,
                          "Negative size not allowed")
        elif size < self.pos:
            self.pos = size
        self.db.put(self.key, "", txn=self.txn, dlen=self.len-size, doff=size)

    def write(self, s):
        if self.closed:
            raise ValueError, "I/O operation on closed file"
        if not s: return
        if self.pos > self.len:
            # pad the gap with NUL bytes; the original appended to a
            # nonexistent self.buflist attribute here
            self.db.put(self.key, '\0'*(self.pos - self.len),
                        txn=self.txn, dlen=self.pos - self.len, doff=self.len)
            self.len = self.pos
        newpos = self.pos + len(s)
        self.db.put(self.key, s, txn=self.txn, dlen=len(s), doff=self.pos)
        self.pos = newpos

    def writelines(self, list):
        self.write(string.joinfields(list, ''))

    def flush(self):
        if self.closed:
            raise ValueError, "I/O operation on closed file"


"""
# A little test suite

def _test():
    import sys
    if sys.argv[1:]:
        file = sys.argv[1]
    else:
        file = '/etc/passwd'
    lines = open(file, 'r').readlines()
    text = open(file, 'r').read()
    f = StringIO()
    for line in lines[:-2]:
        f.write(line)
    f.writelines(lines[-2:])
    if f.getvalue() != text:
        raise RuntimeError, 'write failed'
    length = f.tell()
    print 'File length =', length
    f.seek(len(lines[0]))
    f.write(lines[1])
    f.seek(0)
    print 'First line =', repr(f.readline())
    here = f.tell()
    line = f.readline()
    print 'Second line =', repr(line)
    f.seek(-len(line), 1)
    line2 = f.read(len(line))
    if line != line2:
        raise RuntimeError, 'bad result after seek back'
    f.seek(len(line2), 1)
    list = f.readlines()
    line = list[-1]
    f.seek(f.tell() - len(line))
    line2 = f.read()
    if line != line2:
        raise RuntimeError, 'bad result after seek back from EOF'
    print 'Read', len(list), 'more lines'
    print 'File length =', f.tell()
    if f.tell() != length:
        raise RuntimeError, 'bad length'
    f.close()

if __name__ == '__main__':
    _test()
"""
@ -1,382 +0,0 @@
#!/usr/bin/env python
#------------------------------------------------------------------------
#           Copyright (c) 1997-2001 by Total Control Software
#                         All Rights Reserved
#------------------------------------------------------------------------
#
# Module Name:  dbShelve.py
#
# Description:  A reimplementation of the standard shelve.py that
#               forces the use of cPickle, and DB.
#
# Creation Date:    11/3/97 3:39:04PM
#
# License:      This is free software.  You may use this software for any
#               purpose including modification/redistribution, so long as
#               this header remains intact and that you do not claim any
#               rights of ownership or authorship of this software.  This
#               software has been tested, but no warranty is expressed or
#               implied.
#
# 13-Dec-2000:  Updated to be used with the new bsddb3 package.
#               Added DBShelfCursor class.
#
#------------------------------------------------------------------------

"""Manage shelves of pickled objects using bsddb database files for the
storage.
"""

#------------------------------------------------------------------------

import sys
absolute_import = (sys.version_info[0] >= 3)
if absolute_import :
    # Because this syntax is not valid before Python 2.5
    exec("from . import db")
else :
    import db

if sys.version_info[0] >= 3 :
    import cPickle  # Will be converted to "pickle" by "2to3"
else :
    if sys.version_info < (2, 6) :
        import cPickle
    else :
        # When we drop support for python 2.4
        # we could use: (in 2.5 we need a __future__ statement)
        #
        #    with warnings.catch_warnings():
        #        warnings.filterwarnings(...)
        #        ...
        #
        # We can not use "with" as is, because it would be invalid syntax
        # in python 2.4 and (with no __future__) 2.5.
        # Here we simulate "with" following PEP 343 :
        import warnings
        w = warnings.catch_warnings()
        w.__enter__()
        try :
            warnings.filterwarnings('ignore',
                message='the cPickle module has been removed in Python 3.0',
                category=DeprecationWarning)
            import cPickle
        finally :
            w.__exit__()
        del w

HIGHEST_PROTOCOL = cPickle.HIGHEST_PROTOCOL
def _dumps(object, protocol):
    return cPickle.dumps(object, protocol=protocol)

if sys.version_info < (2, 6) :
    from UserDict import DictMixin as MutableMapping
else :
    import collections
    MutableMapping = collections.MutableMapping

#------------------------------------------------------------------------


def open(filename, flags=db.DB_CREATE, mode=0660, filetype=db.DB_HASH,
         dbenv=None, dbname=None):
    """
    A simple factory function for compatibility with the standard
    shelve.py module.  It can be used like this, where key is a string
    and data is a pickleable object:

        from bsddb import dbshelve
        db = dbshelve.open(filename)

        db[key] = data

        db.close()
    """
    if type(flags) == type(''):
        sflag = flags
        if sflag == 'r':
            flags = db.DB_RDONLY
        elif sflag == 'rw':
            flags = 0
        elif sflag == 'w':
            flags = db.DB_CREATE
        elif sflag == 'c':
            flags = db.DB_CREATE
        elif sflag == 'n':
            flags = db.DB_TRUNCATE | db.DB_CREATE
        else:
            raise db.DBError, "flags should be one of 'r', 'w', 'c' or 'n' or use the bsddb.db.DB_* flags"

    d = DBShelf(dbenv)
    d.open(filename, dbname, filetype, flags, mode)
    return d

#---------------------------------------------------------------------------

class DBShelveError(db.DBError): pass


class DBShelf(MutableMapping):
    """A shelf to hold pickled objects, built upon a bsddb DB object.  It
    automatically pickles/unpickles data objects going to/from the DB.
    """
    def __init__(self, dbenv=None):
        self.db = db.DB(dbenv)
        self._closed = True
        if HIGHEST_PROTOCOL:
            self.protocol = HIGHEST_PROTOCOL
        else:
            self.protocol = 1


    def __del__(self):
        self.close()


    def __getattr__(self, name):
        """Many methods we can just pass through to the DB object.
        (See below)
        """
        return getattr(self.db, name)


    #-----------------------------------
    # Dictionary access methods

    def __len__(self):
        return len(self.db)


    def __getitem__(self, key):
        data = self.db[key]
        return cPickle.loads(data)


    def __setitem__(self, key, value):
        data = _dumps(value, self.protocol)
        self.db[key] = data


    def __delitem__(self, key):
        del self.db[key]


    def keys(self, txn=None):
        if txn is not None:
            return self.db.keys(txn)
        else:
            return self.db.keys()

    if sys.version_info >= (2, 6) :
        def __iter__(self) :  # XXX: Load all keys in memory :-(
            for k in self.db.keys() :
                yield k

        # Do this when "DB" supports iteration
        # Or is it enough to pass thru "getattr"?
        #
        # def __iter__(self) :
        #     return self.db.__iter__()


    def open(self, *args, **kwargs):
        self.db.open(*args, **kwargs)
        self._closed = False


    def close(self, *args, **kwargs):
        self.db.close(*args, **kwargs)
        self._closed = True


    def __repr__(self):
        if self._closed:
            return '<DBShelf @ 0x%x - closed>' % (id(self))
        else:
            return repr(dict(self.iteritems()))


    def items(self, txn=None):
        if txn is not None:
            items = self.db.items(txn)
        else:
            items = self.db.items()
        newitems = []

        for k, v in items:
            newitems.append( (k, cPickle.loads(v)) )
        return newitems

    def values(self, txn=None):
        if txn is not None:
            values = self.db.values(txn)
        else:
            values = self.db.values()

        return map(cPickle.loads, values)

    #-----------------------------------
    # Other methods

    def __append(self, value, txn=None):
        data = _dumps(value, self.protocol)
        return self.db.append(data, txn)

    def append(self, value, txn=None):
        if self.get_type() == db.DB_RECNO:
            return self.__append(value, txn=txn)
        raise DBShelveError, "append() only supported when dbshelve opened with filetype=dbshelve.db.DB_RECNO"


    def associate(self, secondaryDB, callback, flags=0):
        def _shelf_callback(priKey, priData, realCallback=callback):
            # Safe in Python 2.x because the expression short-circuits
            if sys.version_info[0] < 3 or isinstance(priData, bytes) :
                data = cPickle.loads(priData)
            else :
                data = cPickle.loads(bytes(priData, "iso8859-1")) # 8 bits
            return realCallback(priKey, data)

        return self.db.associate(secondaryDB, _shelf_callback, flags)
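
    # Illustrative sketch (added commentary, not part of the original file):
    # associate() keeps a secondary index in sync with the shelf, and the
    # callback sees the *unpickled* record thanks to _shelf_callback above.
    # The names below are hypothetical:
    #
    #     def by_color(key, record):
    #         return record['color']      # secondary key for this record
    #
    #     secondary = db.DB(dbenv)
    #     secondary.open('colors.db', db.DB_BTREE, db.DB_CREATE)
    #     shelf.associate(secondary, by_color)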

    #def get(self, key, default=None, txn=None, flags=0):
    def get(self, *args, **kw):
        # We do it with *args and **kw so if the default value wasn't
        # given nothing is passed to the extension module.  That way
        # an exception can be raised if set_get_returns_none is turned
        # off.
        data = self.db.get(*args, **kw)
        try:
            return cPickle.loads(data)
        except (EOFError, TypeError, cPickle.UnpicklingError):
            return data  # we may be getting the default value, or None,
                         # so it doesn't need to be unpickled.
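
    # Illustrative behaviour (added commentary, not part of the original
    # file), assuming the shelf holds no key 'missing':
    #
    #     shelf.get('missing')          # -> None (or raises, depending on
    #                                   #    set_get_returns_none)
    #     shelf.get('missing', 'dflt')  # -> 'dflt'; the default comes back
    #                                   #    through the except branch above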

    def get_both(self, key, value, txn=None, flags=0):
        data = _dumps(value, self.protocol)
        data = self.db.get(key, data, txn, flags)
        return cPickle.loads(data)


    def cursor(self, txn=None, flags=0):
        c = DBShelfCursor(self.db.cursor(txn, flags))
        c.protocol = self.protocol
        return c


    def put(self, key, value, txn=None, flags=0):
        data = _dumps(value, self.protocol)
        return self.db.put(key, data, txn, flags)


    def join(self, cursorList, flags=0):
        raise NotImplementedError


    #----------------------------------------------
    # Methods allowed to pass-through to self.db
    #
    #    close, delete, fd, get_byteswapped, get_type, has_key,
    #    key_range, open, remove, rename, stat, sync,
    #    upgrade, verify, and all set_* methods.


#---------------------------------------------------------------------------

class DBShelfCursor:
    """
    """
    def __init__(self, cursor):
        self.dbc = cursor

    def __del__(self):
        self.close()


    def __getattr__(self, name):
        """Some methods we can just pass through to the cursor object.  (See below)"""
        return getattr(self.dbc, name)


    #----------------------------------------------

    def dup(self, flags=0):
        c = DBShelfCursor(self.dbc.dup(flags))
        c.protocol = self.protocol
        return c


    def put(self, key, value, flags=0):
        data = _dumps(value, self.protocol)
        return self.dbc.put(key, data, flags)


    def get(self, *args):
        count = len(args)  # a method overloading hack
        method = getattr(self, 'get_%d' % count)
        return method(*args)  # (the original dropped this return value)

    def get_1(self, flags):
        rec = self.dbc.get(flags)
        return self._extract(rec)

    def get_2(self, key, flags):
        rec = self.dbc.get(key, flags)
        return self._extract(rec)

    def get_3(self, key, value, flags):
        data = _dumps(value, self.protocol)
        rec = self.dbc.get(key, data, flags)  # (the original left 'data' unused)
        return self._extract(rec)


    def current(self, flags=0): return self.get_1(flags|db.DB_CURRENT)
    def first(self, flags=0): return self.get_1(flags|db.DB_FIRST)
    def last(self, flags=0): return self.get_1(flags|db.DB_LAST)
    def next(self, flags=0): return self.get_1(flags|db.DB_NEXT)
    def prev(self, flags=0): return self.get_1(flags|db.DB_PREV)
    def consume(self, flags=0): return self.get_1(flags|db.DB_CONSUME)
    def next_dup(self, flags=0): return self.get_1(flags|db.DB_NEXT_DUP)
    def next_nodup(self, flags=0): return self.get_1(flags|db.DB_NEXT_NODUP)
    def prev_nodup(self, flags=0): return self.get_1(flags|db.DB_PREV_NODUP)


    def get_both(self, key, value, flags=0):
        data = _dumps(value, self.protocol)
        rec = self.dbc.get_both(key, data, flags)  # (the original dropped 'data')
        return self._extract(rec)


    def set(self, key, flags=0):
        rec = self.dbc.set(key, flags)
        return self._extract(rec)

    def set_range(self, key, flags=0):
        rec = self.dbc.set_range(key, flags)
        return self._extract(rec)

    def set_recno(self, recno, flags=0):
        rec = self.dbc.set_recno(recno, flags)
        return self._extract(rec)

    set_both = get_both

    def _extract(self, rec):
        if rec is None:
            return None
        else:
            key, data = rec
            # Safe in Python 2.x because the expression short-circuits
            if sys.version_info[0] < 3 or isinstance(data, bytes) :
                return key, cPickle.loads(data)
            else :
                return key, cPickle.loads(bytes(data, "iso8859-1")) # 8 bits

    #----------------------------------------------
    # Methods allowed to pass-through to self.dbc
    #
    #    close, count, delete, get_recno, join_item


#---------------------------------------------------------------------------
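
# Illustrative usage sketch (added commentary, not part of the original
# file); the filename below is a hypothetical example:
#
#     from bsddb import dbshelve
#
#     s = dbshelve.open('/tmp/shelf.db', 'c')
#     s['k1'] = {'color': 'red', 'n': 1}   # pickled transparently
#     c = s.cursor()
#     rec = c.first()
#     while rec is not None:
#         key, value = rec                 # value arrives unpickled
#         print key, value
#         rec = c.next()
#     c.close()
#     s.close()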
@ -1,843 +0,0 @@
#-----------------------------------------------------------------------
#
# Copyright (C) 2000, 2001 by Autonomous Zone Industries
# Copyright (C) 2002 Gregory P. Smith
#
# License:      This is free software.  You may use this software for any
#               purpose including modification/redistribution, so long as
#               this header remains intact and that you do not claim any
#               rights of ownership or authorship of this software.  This
#               software has been tested, but no warranty is expressed or
#               implied.
#
#   --  Gregory P. Smith <greg@krypto.org>

# This provides a simple database table interface built on top of
# the Python Berkeley DB 3 interface.
#
_cvsid = '$Id$'

import re
import sys
import copy
import random
import struct


if sys.version_info[0] >= 3 :
    import pickle
else :
    if sys.version_info < (2, 6) :
        import cPickle as pickle
    else :
        # When we drop support for python 2.4
        # we could use: (in 2.5 we need a __future__ statement)
        #
        #    with warnings.catch_warnings():
        #        warnings.filterwarnings(...)
        #        ...
        #
        # We can not use "with" as is, because it would be invalid syntax
        # in python 2.4 and (with no __future__) 2.5.
        # Here we simulate "with" following PEP 343 :
        import warnings
        w = warnings.catch_warnings()
        w.__enter__()
        try :
            warnings.filterwarnings('ignore',
                message='the cPickle module has been removed in Python 3.0',
                category=DeprecationWarning)
            import cPickle as pickle
        finally :
            w.__exit__()
        del w

try:
    # For Pythons w/distutils pybsddb
    from bsddb3 import db
except ImportError:
    # For Python 2.3
    from bsddb import db

class TableDBError(StandardError):
    pass
class TableAlreadyExists(TableDBError):
    pass


class Cond:
    """This condition matches everything"""
    def __call__(self, s):
        return 1

class ExactCond(Cond):
    """Acts as an exact match condition function"""
    def __init__(self, strtomatch):
        self.strtomatch = strtomatch
    def __call__(self, s):
        return s == self.strtomatch

class PrefixCond(Cond):
    """Acts as a condition function for matching a string prefix"""
    def __init__(self, prefix):
        self.prefix = prefix
    def __call__(self, s):
        return s[:len(self.prefix)] == self.prefix

class PostfixCond(Cond):
    """Acts as a condition function for matching a string postfix"""
    def __init__(self, postfix):
        self.postfix = postfix
    def __call__(self, s):
        return s[-len(self.postfix):] == self.postfix

class LikeCond(Cond):
    """
    Acts as a function that will match using an SQL 'LIKE' style
    string.  Case insensitive and % signs are wild cards.
    This isn't perfect but it should work for the simple common cases.
    """
    def __init__(self, likestr, re_flags=re.IGNORECASE):
        # escape python re characters
        chars_to_escape = '.*+()[]?'
        for char in chars_to_escape :
            likestr = likestr.replace(char, '\\'+char)
        # convert %s to wildcards
        self.likestr = likestr.replace('%', '.*')
        self.re = re.compile('^'+self.likestr+'$', re_flags)
    def __call__(self, s):
        return self.re.match(s)
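
# Illustrative sketch (added commentary, not part of the original file):
# condition objects are just callables over the stored string, e.g.
#
#     ExactCond('smith')('smith')        # -> True  (equality)
#     PrefixCond('sm')('smith')          # -> True  (startswith)
#     PostfixCond('th')('smith')         # -> True  (endswith)
#     LikeCond('%mit%')('smith')         # -> a match object (truthy)
#
# so Select/Modify/Delete below can mix and match them per column.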

#
# keys used to store database metadata
#
_table_names_key = '__TABLE_NAMES__'  # list of the tables in this db
_columns = '._COLUMNS__'  # table_name+this key contains a list of columns

def _columns_key(table):
    return table + _columns

#
# these keys are found within table sub databases
#
_data = '._DATA_.'  # this+column+this+rowid key contains table data
_rowid = '._ROWID_.'  # this+rowid+this key contains a unique entry for each
                      # row in the table.  (no data is stored)
_rowid_str_len = 8  # length in bytes of the unique rowid strings


def _data_key(table, col, rowid):
    return table + _data + col + _data + rowid

def _search_col_data_key(table, col):
    return table + _data + col + _data

def _search_all_data_key(table):
    return table + _data

def _rowid_key(table, rowid):
    return table + _rowid + rowid + _rowid

def _search_rowid_key(table):
    return table + _rowid

def contains_metastrings(s) :
    """Verify that the given string does not contain any
    metadata strings that might interfere with dbtables database operation.
    """
    if (s.find(_table_names_key) >= 0 or
        s.find(_columns) >= 0 or
        s.find(_data) >= 0 or
        s.find(_rowid) >= 0):
        # Then
        return 1
    else:
        return 0
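
# Illustrative sketch (added commentary, not part of the original file):
# every cell and row marker lives in the one BTree under composed keys,
# e.g. for a hypothetical table 'people', column 'name' and an 8-byte rowid:
#
#     _columns_key('people')               # -> 'people._COLUMNS__'
#     _data_key('people', 'name', rowid)   # -> 'people._DATA_.name._DATA_.' + rowid
#     _rowid_key('people', rowid)          # -> 'people._ROWID_.' + rowid + '._ROWID_.'
#
# which is why contains_metastrings() must reject these substrings in
# user-supplied table and column names.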


class bsdTableDB :
    def __init__(self, filename, dbhome, create=0, truncate=0, mode=0600,
                 recover=0, dbflags=0):
        """bsdTableDB(filename, dbhome, create=0, truncate=0, mode=0600)

        Open database name in the dbhome Berkeley DB directory.
        Use keyword arguments when calling this constructor.
        """
        self.db = None
        myflags = db.DB_THREAD
        if create:
            myflags |= db.DB_CREATE
        flagsforenv = (db.DB_INIT_MPOOL | db.DB_INIT_LOCK | db.DB_INIT_LOG |
                       db.DB_INIT_TXN | dbflags)
        # DB_AUTO_COMMIT isn't a valid flag for env.open()
        try:
            dbflags |= db.DB_AUTO_COMMIT
        except AttributeError:
            pass
        if recover:
            flagsforenv = flagsforenv | db.DB_RECOVER
        self.env = db.DBEnv()
        # enable auto deadlock avoidance
        self.env.set_lk_detect(db.DB_LOCK_DEFAULT)
        self.env.open(dbhome, myflags | flagsforenv)
        if truncate:
            myflags |= db.DB_TRUNCATE
        self.db = db.DB(self.env)
        # this code relies on DBCursor.set* methods to raise exceptions
        # rather than returning None
        self.db.set_get_returns_none(1)
        # allow duplicate entries [warning: be careful w/ metadata]
        self.db.set_flags(db.DB_DUP)
        self.db.open(filename, db.DB_BTREE, dbflags | myflags, mode)
        self.dbfilename = filename

        if sys.version_info[0] >= 3 :
            class cursor_py3k(object) :
                def __init__(self, dbcursor) :
                    self._dbcursor = dbcursor

                def close(self) :
                    return self._dbcursor.close()

                def set_range(self, search) :
                    v = self._dbcursor.set_range(bytes(search, "iso8859-1"))
                    if v is not None :
                        v = (v[0].decode("iso8859-1"),
                             v[1].decode("iso8859-1"))
                    return v

                def __next__(self) :
                    v = getattr(self._dbcursor, "next")()
                    if v is not None :
                        v = (v[0].decode("iso8859-1"),
                             v[1].decode("iso8859-1"))
                    return v

            class db_py3k(object) :
                def __init__(self, db) :
                    self._db = db

                def cursor(self, txn=None) :
                    return cursor_py3k(self._db.cursor(txn=txn))

                def has_key(self, key, txn=None) :
                    return getattr(self._db,"has_key")(bytes(key, "iso8859-1"),
                        txn=txn)

                def put(self, key, value, flags=0, txn=None) :
                    key = bytes(key, "iso8859-1")
                    if value is not None :
                        value = bytes(value, "iso8859-1")
                    return self._db.put(key, value, flags=flags, txn=txn)

                def put_bytes(self, key, value, txn=None) :
                    key = bytes(key, "iso8859-1")
                    return self._db.put(key, value, txn=txn)

                def get(self, key, txn=None, flags=0) :
                    key = bytes(key, "iso8859-1")
                    v = self._db.get(key, txn=txn, flags=flags)
                    if v is not None :
                        v = v.decode("iso8859-1")
                    return v

                def get_bytes(self, key, txn=None, flags=0) :
                    key = bytes(key, "iso8859-1")
                    return self._db.get(key, txn=txn, flags=flags)

                def delete(self, key, txn=None) :
                    key = bytes(key, "iso8859-1")
                    return self._db.delete(key, txn=txn)

                def close (self) :
                    return self._db.close()

            self.db = db_py3k(self.db)
        else :  # Python 2.x
            pass

        # Initialize the table names list if this is a new database
        txn = self.env.txn_begin()
        try:
            if not getattr(self.db, "has_key")(_table_names_key, txn):
                getattr(self.db, "put_bytes", self.db.put) \
                    (_table_names_key, pickle.dumps([], 1), txn=txn)
        # Yes, bare except
        except:
            txn.abort()
            raise
        else:
            txn.commit()
        # TODO verify more of the database's metadata?
        self.__tablecolumns = {}

    def __del__(self):
        self.close()

    def close(self):
        if self.db is not None:
            self.db.close()
            self.db = None
        if self.env is not None:
            self.env.close()
            self.env = None

    def checkpoint(self, mins=0):
        self.env.txn_checkpoint(mins)

    def sync(self):
        self.db.sync()

    def _db_print(self) :
        """Print the database to stdout for debugging"""
        print "******** Printing raw database for debugging ********"
        cur = self.db.cursor()
        try:
            key, data = cur.first()
            while 1:
                print repr({key: data})
                next = cur.next()
                if next:
                    key, data = next
                else:
                    cur.close()
                    return
        except db.DBNotFoundError:
            cur.close()


    def CreateTable(self, table, columns):
        """CreateTable(table, columns) - Create a new table in the database.

        raises TableDBError if it already exists or for other DB errors.
        """
        assert isinstance(columns, list)

        txn = None
        try:
            # checking sanity of the table and column names here on
            # table creation will prevent problems elsewhere.
            if contains_metastrings(table):
                raise ValueError(
                    "bad table name: contains reserved metastrings")
            for column in columns :
                if contains_metastrings(column):
                    raise ValueError(
                        "bad column name: contains reserved metastrings")

            columnlist_key = _columns_key(table)
            if getattr(self.db, "has_key")(columnlist_key):
                raise TableAlreadyExists, "table already exists"

            txn = self.env.txn_begin()
            # store the table's column info
            getattr(self.db, "put_bytes", self.db.put)(columnlist_key,
                pickle.dumps(columns, 1), txn=txn)

            # add the table name to the tablelist
            tablelist = pickle.loads(getattr(self.db, "get_bytes",
                self.db.get) (_table_names_key, txn=txn, flags=db.DB_RMW))
            tablelist.append(table)
            # delete 1st, in case we opened with DB_DUP
            self.db.delete(_table_names_key, txn=txn)
            getattr(self.db, "put_bytes", self.db.put)(_table_names_key,
                pickle.dumps(tablelist, 1), txn=txn)

            txn.commit()
            txn = None
        except db.DBError, dberror:
            if txn:
                txn.abort()
            if sys.version_info < (2, 6) :
                raise TableDBError, dberror[1]
            else :
                raise TableDBError, dberror.args[1]


    def ListTableColumns(self, table):
        """Return a list of columns in the given table.
        [] if the table doesn't exist.
        """
        assert isinstance(table, str)
        if contains_metastrings(table):
            raise ValueError, "bad table name: contains reserved metastrings"

        columnlist_key = _columns_key(table)
        if not getattr(self.db, "has_key")(columnlist_key):
            return []
        pickledcolumnlist = getattr(self.db, "get_bytes",
            self.db.get)(columnlist_key)
        if pickledcolumnlist:
            return pickle.loads(pickledcolumnlist)
        else:
            return []

    def ListTables(self):
        """Return a list of tables in this database."""
        # the original called self.db.get_get() here, a method that
        # doesn't exist; use the same accessor pattern as elsewhere
        pickledtablelist = getattr(self.db, "get_bytes",
            self.db.get)(_table_names_key)
        if pickledtablelist:
            return pickle.loads(pickledtablelist)
        else:
            return []

    def CreateOrExtendTable(self, table, columns):
        """CreateOrExtendTable(table, columns)

        Create a new table in the database.

        If a table of this name already exists, extend it to have any
        additional columns present in the given list as well as
        all of its current columns.
        """
        assert isinstance(columns, list)

        try:
            self.CreateTable(table, columns)
        except TableAlreadyExists:
            # the table already existed, add any new columns
            txn = None
            try:
                columnlist_key = _columns_key(table)
                txn = self.env.txn_begin()

                # load the current column list
                oldcolumnlist = pickle.loads(
                    getattr(self.db, "get_bytes",
                        self.db.get)(columnlist_key, txn=txn, flags=db.DB_RMW))
                # create a hash table for fast lookups of column names in the
                # loop below
                oldcolumnhash = {}
                for c in oldcolumnlist:
                    oldcolumnhash[c] = c

                # create a new column list containing both the old and new
                # column names
                newcolumnlist = copy.copy(oldcolumnlist)
                for c in columns:
                    if not c in oldcolumnhash:
                        newcolumnlist.append(c)

                # store the table's new extended column list
                if newcolumnlist != oldcolumnlist :
                    # delete the old one first since we opened with DB_DUP
                    self.db.delete(columnlist_key, txn=txn)
                    getattr(self.db, "put_bytes", self.db.put)(columnlist_key,
                        pickle.dumps(newcolumnlist, 1),
                        txn=txn)

                txn.commit()
                txn = None

                self.__load_column_info(table)
            except db.DBError, dberror:
                if txn:
                    txn.abort()
                if sys.version_info < (2, 6) :
                    raise TableDBError, dberror[1]
                else :
                    raise TableDBError, dberror.args[1]


    def __load_column_info(self, table) :
        """initialize the self.__tablecolumns dict"""
        # check the column names
        try:
            tcolpickles = getattr(self.db, "get_bytes",
                self.db.get)(_columns_key(table))
        except db.DBNotFoundError:
            raise TableDBError, "unknown table: %r" % (table,)
        if not tcolpickles:
            raise TableDBError, "unknown table: %r" % (table,)
        self.__tablecolumns[table] = pickle.loads(tcolpickles)

    def __new_rowid(self, table, txn) :
        """Create a new unique row identifier"""
        unique = 0
        while not unique:
            # Generate a random 64-bit row ID string
            # (note: might have <64 bits of true randomness
            # but it's plenty for our database id needs!)
            blist = []
            for x in xrange(_rowid_str_len):
                blist.append(random.randint(0,255))
            newid = struct.pack('B'*_rowid_str_len, *blist)

            if sys.version_info[0] >= 3 :
                newid = newid.decode("iso8859-1")  # 8 bits

            # Guarantee uniqueness by adding this key to the database
            try:
                self.db.put(_rowid_key(table, newid), None, txn=txn,
                    flags=db.DB_NOOVERWRITE)
            except db.DBKeyExistError:
                pass
            else:
                unique = 1

        return newid


    def Insert(self, table, rowdict) :
        """Insert(table, datadict) - Insert a new row into the table
        using the keys+values from rowdict as the column values.
        """

        txn = None
        try:
            if not getattr(self.db, "has_key")(_columns_key(table)):
                raise TableDBError, "unknown table"

            # check the validity of each column name
            if not table in self.__tablecolumns:
                self.__load_column_info(table)
            for column in rowdict.keys() :
                if not self.__tablecolumns[table].count(column):
                    raise TableDBError, "unknown column: %r" % (column,)

            # get a unique row identifier for this row
            txn = self.env.txn_begin()
            rowid = self.__new_rowid(table, txn=txn)

            # insert the row values into the table database
            for column, dataitem in rowdict.items():
                # store the value
                self.db.put(_data_key(table, column, rowid), dataitem, txn=txn)

            txn.commit()
            txn = None

        except db.DBError, dberror:
            # WIBNI we could just abort the txn and re-raise the exception?
            # But no, because TableDBError is not related to DBError via
            # inheritance, so it would be backwards incompatible.  Do the next
            # best thing.
            info = sys.exc_info()
            if txn:
                txn.abort()
                self.db.delete(_rowid_key(table, rowid))
            if sys.version_info < (2, 6) :
                raise TableDBError, dberror[1], info[2]
            else :
                raise TableDBError, dberror.args[1], info[2]
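
    # Illustrative usage sketch (added commentary, not part of the original
    # file); paths and names below are hypothetical:
    #
    #     t = bsdTableDB('tables.db', '/tmp/dbhome', create=1)
    #     t.CreateTable('people', ['name', 'email'])
    #     t.Insert('people', {'name': 'ann', 'email': 'ann@example.com'})
    #     rows = t.Select('people', ['email'],
    #                     conditions={'name': ExactCond('ann')})
    #     # -> [{'email': 'ann@example.com'}]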

    def Modify(self, table, conditions={}, mappings={}):
        """Modify(table, conditions={}, mappings={}) - Modify items in rows matching 'conditions' using mapping functions in 'mappings'

        * table - the table name
        * conditions - a dictionary keyed on column names containing
          a condition callable expecting the data string as an
          argument and returning a boolean.
        * mappings - a dictionary keyed on column names containing a
          condition callable expecting the data string as an argument and
          returning the new string for that column.
        """

        try:
            matching_rowids = self.__Select(table, [], conditions)

            # modify only requested columns
            columns = mappings.keys()
            for rowid in matching_rowids.keys():
                txn = None
                try:
                    for column in columns:
                        txn = self.env.txn_begin()
                        # modify the requested column
                        try:
                            dataitem = self.db.get(
                                _data_key(table, column, rowid),
                                txn=txn)
                            self.db.delete(
                                _data_key(table, column, rowid),
                                txn=txn)
                        except db.DBNotFoundError:
                            # XXXXXXX row key somehow didn't exist, assume no
                            # error
                            dataitem = None
                        dataitem = mappings[column](dataitem)
                        if dataitem is not None:
                            self.db.put(
                                _data_key(table, column, rowid),
                                dataitem, txn=txn)
                        txn.commit()
                        txn = None

                # catch all exceptions here since we call unknown callables
                except:
                    if txn:
                        txn.abort()
                    raise

        except db.DBError, dberror:
            if sys.version_info < (2, 6) :
                raise TableDBError, dberror[1]
            else :
                raise TableDBError, dberror.args[1]

    def Delete(self, table, conditions={}):
        """Delete(table, conditions) - Delete items matching the given
        conditions from the table.

        * conditions - a dictionary keyed on column names containing
          condition functions expecting the data string as an
          argument and returning a boolean.
        """

        try:
            matching_rowids = self.__Select(table, [], conditions)

            # delete row data from all columns
            columns = self.__tablecolumns[table]
            for rowid in matching_rowids.keys():
                txn = None
                try:
                    txn = self.env.txn_begin()
                    for column in columns:
                        # delete the data key
                        try:
                            self.db.delete(_data_key(table, column, rowid),
                                txn=txn)
                        except db.DBNotFoundError:
                            # XXXXXXX column may not exist, assume no error
                            pass

                    try:
                        self.db.delete(_rowid_key(table, rowid), txn=txn)
                    except db.DBNotFoundError:
                        # XXXXXXX row key somehow didn't exist, assume no error
                        pass
                    txn.commit()
                    txn = None
                except db.DBError, dberror:
                    if txn:
                        txn.abort()
                    raise
        except db.DBError, dberror:
            if sys.version_info < (2, 6) :
                raise TableDBError, dberror[1]
            else :
                raise TableDBError, dberror.args[1]


    def Select(self, table, columns, conditions={}):
        """Select(table, columns, conditions) - retrieve specific row data
        Returns a list of row column->value mapping dictionaries.

        * columns - a list of which column data to return.  If
          columns is None, all columns will be returned.
        * conditions - a dictionary keyed on column names
          containing callable conditions expecting the data string as an
          argument and returning a boolean.
        """
        try:
            if not table in self.__tablecolumns:
                self.__load_column_info(table)
            if columns is None:
                columns = self.__tablecolumns[table]
            matching_rowids = self.__Select(table, columns, conditions)
        except db.DBError, dberror:
            if sys.version_info < (2, 6) :
                raise TableDBError, dberror[1]
            else :
                raise TableDBError, dberror.args[1]
        # return the matches as a list of dictionaries
        return matching_rowids.values()


    def __Select(self, table, columns, conditions):
        """__Select() - Used to implement Select and Delete (above)
        Returns a dictionary keyed on rowids containing dicts
        holding the row data for columns listed in the columns param
        that match the given conditions.
        * conditions is a dictionary keyed on column names
          containing callable conditions expecting the data string as an
          argument and returning a boolean.
        """
        # check the validity of each column name
        if not table in self.__tablecolumns:
            self.__load_column_info(table)
        if columns is None:
            # the original read self.tablecolumns here, an attribute that
            # doesn't exist (cf. self.__tablecolumns everywhere else)
            columns = self.__tablecolumns[table]
        for column in (columns + conditions.keys()):
            if not self.__tablecolumns[table].count(column):
                raise TableDBError, "unknown column: %r" % (column,)

        # keyed on rows that match so far, containing dicts keyed on
        # column names containing the data for that row and column.
        matching_rowids = {}
        # keys are rowids that do not match
        rejected_rowids = {}

        # attempt to sort the conditions in such a way as to minimize full
        # column lookups
        def cmp_conditions(atuple, btuple):
            a = atuple[1]
            b = btuple[1]
            if type(a) is type(b):

                # Needed for python 3. "cmp" vanished in 3.0.1
                def cmp(a, b) :
                    if a==b : return 0
                    if a<b : return -1
                    return 1

                if isinstance(a, PrefixCond) and isinstance(b, PrefixCond):
                    # longest prefix first
                    return cmp(len(b.prefix), len(a.prefix))
                if isinstance(a, LikeCond) and isinstance(b, LikeCond):
                    # longest likestr first
                    return cmp(len(b.likestr), len(a.likestr))
                return 0
            if isinstance(a, ExactCond):
                return -1
            if isinstance(b, ExactCond):
                return 1
            if isinstance(a, PrefixCond):
                return -1
            if isinstance(b, PrefixCond):
                return 1
            # leave all unknown condition callables alone as equals
            return 0

        if sys.version_info < (2, 6) :
            conditionlist = conditions.items()
            conditionlist.sort(cmp_conditions)
        else :  # Insertion Sort. Please, improve
            conditionlist = []
            for i in conditions.items() :
                for j, k in enumerate(conditionlist) :
                    r = cmp_conditions(k, i)
                    if r == 1 :
                        conditionlist.insert(j, i)
                        break
                else :
                    conditionlist.append(i)

        # Apply conditions to column data to find what we want
        cur = self.db.cursor()
        column_num = -1
        for column, condition in conditionlist:
            column_num = column_num + 1
            searchkey = _search_col_data_key(table, column)
            # speedup: don't linear search columns within loop
            if column in columns:
                savethiscolumndata = 1  # save the data for return
            else:
                savethiscolumndata = 0  # data only used for selection

            try:
                key, data = cur.set_range(searchkey)
                while key[:len(searchkey)] == searchkey:
                    # extract the rowid from the key
                    rowid = key[-_rowid_str_len:]

                    if not rowid in rejected_rowids:
                        # if no condition was specified or the condition
                        # succeeds, add row to our match list.
                        if not condition or condition(data):
                            if not rowid in matching_rowids:
                                matching_rowids[rowid] = {}
                            if savethiscolumndata:
                                matching_rowids[rowid][column] = data
                        else:
                            if rowid in matching_rowids:
                                del matching_rowids[rowid]
                            rejected_rowids[rowid] = rowid

                    key, data = cur.next()

            except db.DBError, dberror:
|
||||
if dberror.args[0] != db.DB_NOTFOUND:
|
||||
raise
|
||||
continue
|
||||
|
||||
cur.close()
|
||||
|
||||
# we're done selecting rows, garbage collect the reject list
|
||||
del rejected_rowids
|
||||
|
||||
# extract any remaining desired column data from the
|
||||
# database for the matching rows.
|
||||
if len(columns) > 0:
|
||||
for rowid, rowdata in matching_rowids.items():
|
||||
for column in columns:
|
||||
if column in rowdata:
|
||||
continue
|
||||
try:
|
||||
rowdata[column] = self.db.get(
|
||||
_data_key(table, column, rowid))
|
||||
except db.DBError, dberror:
|
||||
if sys.version_info < (2, 6) :
|
||||
if dberror[0] != db.DB_NOTFOUND:
|
||||
raise
|
||||
else :
|
||||
if dberror.args[0] != db.DB_NOTFOUND:
|
||||
raise
|
||||
rowdata[column] = None
|
||||
|
||||
# return the matches
|
||||
return matching_rowids
|
||||
|
||||
|
||||
def Drop(self, table):
|
||||
"""Remove an entire table from the database"""
|
||||
txn = None
|
||||
try:
|
||||
txn = self.env.txn_begin()
|
||||
|
||||
# delete the column list
|
||||
self.db.delete(_columns_key(table), txn=txn)
|
||||
|
||||
cur = self.db.cursor(txn)
|
||||
|
||||
# delete all keys containing this tables column and row info
|
||||
table_key = _search_all_data_key(table)
|
||||
while 1:
|
||||
try:
|
||||
key, data = cur.set_range(table_key)
|
||||
except db.DBNotFoundError:
|
||||
break
|
||||
# only delete items in this table
|
||||
if key[:len(table_key)] != table_key:
|
||||
break
|
||||
cur.delete()
|
||||
|
||||
# delete all rowids used by this table
|
||||
table_key = _search_rowid_key(table)
|
||||
while 1:
|
||||
try:
|
||||
key, data = cur.set_range(table_key)
|
||||
except db.DBNotFoundError:
|
||||
break
|
||||
# only delete items in this table
|
||||
if key[:len(table_key)] != table_key:
|
||||
break
|
||||
cur.delete()
|
||||
|
||||
cur.close()
|
||||
|
||||
# delete the tablename from the table name list
|
||||
tablelist = pickle.loads(
|
||||
getattr(self.db, "get_bytes", self.db.get)(_table_names_key,
|
||||
txn=txn, flags=db.DB_RMW))
|
||||
try:
|
||||
tablelist.remove(table)
|
||||
except ValueError:
|
||||
# hmm, it wasn't there, oh well, that's what we want.
|
||||
pass
|
||||
# delete 1st, incase we opened with DB_DUP
|
||||
self.db.delete(_table_names_key, txn=txn)
|
||||
getattr(self.db, "put_bytes", self.db.put)(_table_names_key,
|
||||
pickle.dumps(tablelist, 1), txn=txn)
|
||||
|
||||
txn.commit()
|
||||
txn = None
|
||||
|
||||
if table in self.__tablecolumns:
|
||||
del self.__tablecolumns[table]
|
||||
|
||||
except db.DBError, dberror:
|
||||
if txn:
|
||||
txn.abort()
|
||||
raise TableDBError(dberror.args[1])
|
||||
|
|
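For orientation, here is a minimal usage sketch of the table API this file defined, driving the Select/Delete/Drop methods above together with CreateTable/Insert from earlier in the file. The path, table and column names are illustrative assumptions, not taken from the diff:

    from bsddb import dbtables

    tdb = dbtables.bsdTableDB('tables.db', dbhome='/tmp/dbtables-home', create=1)
    tdb.CreateTable('songs', ['artist', 'title', 'genre'])
    tdb.Insert('songs', {'artist': 'Yes', 'title': 'Rhythm Of Love', 'genre': 'Rock'})
    # conditions map column names to predicates over the stored strings
    print tdb.Select('songs', ['title'], conditions={'genre': dbtables.ExactCond('Rock')})
    tdb.Delete('songs', conditions={'genre': dbtables.ExactCond('Rock')})
    tdb.Drop('songs')
    tdb.close()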
@ -1,83 +0,0 @@
#------------------------------------------------------------------------
#
#  Copyright (C) 2000 Autonomous Zone Industries
#
#  License:      This is free software.  You may use this software for any
#                purpose including modification/redistribution, so long as
#                this header remains intact and that you do not claim any
#                rights of ownership or authorship of this software.  This
#                software has been tested, but no warranty is expressed or
#                implied.
#
#  Author: Gregory P. Smith <greg@krypto.org>
#
#  Note: I don't know how useful this is in reality since when a
#        DBLockDeadlockError happens the current transaction is supposed to be
#        aborted.  If it doesn't then when the operation is attempted again
#        the deadlock is still happening...
#        --Robin
#
#------------------------------------------------------------------------


#
# import the time.sleep function in a namespace safe way to allow
# "from bsddb.dbutils import *"
#
from time import sleep as _sleep

import sys
absolute_import = (sys.version_info[0] >= 3)
if absolute_import :
    # Because this syntax is not valid before Python 2.5
    exec("from . import db")
else :
    import db

# always sleep at least N seconds between retries
_deadlock_MinSleepTime = 1.0/128
# never sleep more than N seconds between retries
_deadlock_MaxSleepTime = 3.14159

# Assign a file object to this for a "sleeping" message to be written to it
# each retry
_deadlock_VerboseFile = None


def DeadlockWrap(function, *_args, **_kwargs):
    """DeadlockWrap(function, *_args, **_kwargs) - automatically retries
    function in case of a database deadlock.

    This is a function intended to be used to wrap database calls such
    that they perform retries with exponentially backing off sleeps in
    between when a DBLockDeadlockError exception is raised.

    A 'max_retries' parameter may optionally be passed to prevent it
    from retrying forever (in which case the exception will be reraised).

        d = DB(...)
        d.open(...)
        DeadlockWrap(d.put, "foo", data="bar")  # set key "foo" to "bar"
    """
    sleeptime = _deadlock_MinSleepTime
    max_retries = _kwargs.get('max_retries', -1)
    if 'max_retries' in _kwargs:
        del _kwargs['max_retries']
    while True:
        try:
            return function(*_args, **_kwargs)
        except db.DBLockDeadlockError:
            if _deadlock_VerboseFile:
                _deadlock_VerboseFile.write(
                    'dbutils.DeadlockWrap: sleeping %1.3f\n' % sleeptime)
            _sleep(sleeptime)
            # exponential backoff in the sleep time
            sleeptime *= 2
            if sleeptime > _deadlock_MaxSleepTime:
                sleeptime = _deadlock_MaxSleepTime
            max_retries -= 1
            if max_retries == -1:
                raise


#------------------------------------------------------------------------
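A short usage sketch for DeadlockWrap (the file name is illustrative). Note from the loop above that max_retries=N permits N+1 attempts before the DBLockDeadlockError is finally re-raised:

    import sys
    from bsddb import db, dbutils

    dbutils._deadlock_VerboseFile = sys.stderr   # log each backoff sleep
    d = db.DB()
    d.open('example.db', dbtype=db.DB_HASH, flags=db.DB_CREATE)
    dbutils.DeadlockWrap(d.put, 'foo', 'bar', max_retries=3)
    d.close()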
@ -1,623 +0,0 @@
"""Run all test cases.
"""

import sys
import os
import unittest
try:
    # For Pythons w/distutils pybsddb
    import bsddb3 as bsddb
except ImportError:
    # For Python 2.3
    import bsddb


if sys.version_info[0] >= 3 :
    charset = "iso8859-1"  # Full 8 bit

    class logcursor_py3k(object) :
        def __init__(self, env) :
            self._logcursor = env.log_cursor()

        def __getattr__(self, v) :
            return getattr(self._logcursor, v)

        def __next__(self) :
            v = getattr(self._logcursor, "next")()
            if v is not None :
                v = (v[0], v[1].decode(charset))
            return v

        next = __next__

        def first(self) :
            v = self._logcursor.first()
            if v is not None :
                v = (v[0], v[1].decode(charset))
            return v

        def last(self) :
            v = self._logcursor.last()
            if v is not None :
                v = (v[0], v[1].decode(charset))
            return v

        def prev(self) :
            v = self._logcursor.prev()
            if v is not None :
                v = (v[0], v[1].decode(charset))
            return v

        def current(self) :
            v = self._logcursor.current()
            if v is not None :
                v = (v[0], v[1].decode(charset))
            return v

        def set(self, lsn) :
            v = self._logcursor.set(lsn)
            if v is not None :
                v = (v[0], v[1].decode(charset))
            return v

    class cursor_py3k(object) :
        def __init__(self, db, *args, **kwargs) :
            self._dbcursor = db.cursor(*args, **kwargs)

        def __getattr__(self, v) :
            return getattr(self._dbcursor, v)

        def _fix(self, v) :
            if v is None : return None
            key, value = v
            if isinstance(key, bytes) :
                key = key.decode(charset)
            return (key, value.decode(charset))

        def __next__(self) :
            v = getattr(self._dbcursor, "next")()
            return self._fix(v)

        next = __next__

        def previous(self) :
            v = self._dbcursor.previous()
            return self._fix(v)

        def last(self) :
            v = self._dbcursor.last()
            return self._fix(v)

        def set(self, k) :
            if isinstance(k, str) :
                k = bytes(k, charset)
            v = self._dbcursor.set(k)
            return self._fix(v)

        def set_recno(self, num) :
            v = self._dbcursor.set_recno(num)
            return self._fix(v)

        def set_range(self, k, dlen=-1, doff=-1) :
            if isinstance(k, str) :
                k = bytes(k, charset)
            v = self._dbcursor.set_range(k, dlen=dlen, doff=doff)
            return self._fix(v)

        def dup(self, flags=0) :
            cursor = self._dbcursor.dup(flags)
            return dup_cursor_py3k(cursor)

        def next_dup(self) :
            v = self._dbcursor.next_dup()
            return self._fix(v)

        def next_nodup(self) :
            v = self._dbcursor.next_nodup()
            return self._fix(v)

        def put(self, key, data, flags=0, dlen=-1, doff=-1) :
            if isinstance(key, str) :
                key = bytes(key, charset)
            if isinstance(data, str) :
                data = bytes(data, charset)
            return self._dbcursor.put(key, data, flags=flags, dlen=dlen,
                    doff=doff)

        def current(self, flags=0, dlen=-1, doff=-1) :
            v = self._dbcursor.current(flags=flags, dlen=dlen, doff=doff)
            return self._fix(v)

        def first(self) :
            v = self._dbcursor.first()
            return self._fix(v)

        def pget(self, key=None, data=None, flags=0) :
            # Incorrect because key can be a bare number,
            # but enough to pass testsuite
            if isinstance(key, int) and (data is None) and (flags == 0) :
                flags = key
                key = None
            if isinstance(key, str) :
                key = bytes(key, charset)
            if isinstance(data, int) and (flags==0) :
                flags = data
                data = None
            if isinstance(data, str) :
                data = bytes(data, charset)
            v=self._dbcursor.pget(key=key, data=data, flags=flags)
            if v is not None :
                v1, v2, v3 = v
                if isinstance(v1, bytes) :
                    v1 = v1.decode(charset)
                if isinstance(v2, bytes) :
                    v2 = v2.decode(charset)

                v = (v1, v2, v3.decode(charset))

            return v

        def join_item(self) :
            v = self._dbcursor.join_item()
            if v is not None :
                v = v.decode(charset)
            return v

        def get(self, *args, **kwargs) :
            l = len(args)
            if l == 2 :
                k, f = args
                if isinstance(k, str) :
                    k = bytes(k, "iso8859-1")
                args = (k, f)
            elif l == 3 :
                k, d, f = args
                if isinstance(k, str) :
                    k = bytes(k, charset)
                if isinstance(d, str) :
                    d = bytes(d, charset)
                args = (k, d, f)

            v = self._dbcursor.get(*args, **kwargs)
            if v is not None :
                k, v = v
                if isinstance(k, bytes) :
                    k = k.decode(charset)
                v = (k, v.decode(charset))
            return v

        def get_both(self, key, value) :
            if isinstance(key, str) :
                key = bytes(key, charset)
            if isinstance(value, str) :
                value = bytes(value, charset)
            v = self._dbcursor.get_both(key, value)
            return self._fix(v)

    class dup_cursor_py3k(cursor_py3k) :
        def __init__(self, dbcursor) :
            self._dbcursor = dbcursor

    class DB_py3k(object) :
        def __init__(self, *args, **kwargs) :
            args2=[]
            for i in args :
                if isinstance(i, DBEnv_py3k) :
                    i = i._dbenv
                args2.append(i)
            args = tuple(args2)
            for k, v in kwargs.items() :
                if isinstance(v, DBEnv_py3k) :
                    kwargs[k] = v._dbenv

            self._db = bsddb._db.DB_orig(*args, **kwargs)

        def __contains__(self, k) :
            if isinstance(k, str) :
                k = bytes(k, charset)
            return getattr(self._db, "has_key")(k)

        def __getitem__(self, k) :
            if isinstance(k, str) :
                k = bytes(k, charset)
            v = self._db[k]
            if v is not None :
                v = v.decode(charset)
            return v

        def __setitem__(self, k, v) :
            if isinstance(k, str) :
                k = bytes(k, charset)
            if isinstance(v, str) :
                v = bytes(v, charset)
            self._db[k] = v

        def __delitem__(self, k) :
            if isinstance(k, str) :
                k = bytes(k, charset)
            del self._db[k]

        def __getattr__(self, v) :
            return getattr(self._db, v)

        def __len__(self) :
            return len(self._db)

        def has_key(self, k, txn=None) :
            if isinstance(k, str) :
                k = bytes(k, charset)
            return self._db.has_key(k, txn=txn)

        def set_re_delim(self, c) :
            if isinstance(c, str) :  # We can use a numeric value byte too
                c = bytes(c, charset)
            return self._db.set_re_delim(c)

        def set_re_pad(self, c) :
            if isinstance(c, str) :  # We can use a numeric value byte too
                c = bytes(c, charset)
            return self._db.set_re_pad(c)

        def get_re_source(self) :
            source = self._db.get_re_source()
            return source.decode(charset)

        def put(self, key, data, txn=None, flags=0, dlen=-1, doff=-1) :
            if isinstance(key, str) :
                key = bytes(key, charset)
            if isinstance(data, str) :
                data = bytes(data, charset)
            return self._db.put(key, data, flags=flags, txn=txn, dlen=dlen,
                    doff=doff)

        def append(self, value, txn=None) :
            if isinstance(value, str) :
                value = bytes(value, charset)
            return self._db.append(value, txn=txn)

        def get_size(self, key) :
            if isinstance(key, str) :
                key = bytes(key, charset)
            return self._db.get_size(key)

        def exists(self, key, *args, **kwargs) :
            if isinstance(key, str) :
                key = bytes(key, charset)
            return self._db.exists(key, *args, **kwargs)

        def get(self, key, default="MagicCookie", txn=None, flags=0, dlen=-1, doff=-1) :
            if isinstance(key, str) :
                key = bytes(key, charset)
            if default != "MagicCookie" :  # Magic for 'test_get_none.py'
                v=self._db.get(key, default=default, txn=txn, flags=flags,
                        dlen=dlen, doff=doff)
            else :
                v=self._db.get(key, txn=txn, flags=flags,
                        dlen=dlen, doff=doff)
            if (v is not None) and isinstance(v, bytes) :
                v = v.decode(charset)
            return v

        def pget(self, key, txn=None) :
            if isinstance(key, str) :
                key = bytes(key, charset)
            v=self._db.pget(key, txn=txn)
            if v is not None :
                v1, v2 = v
                if isinstance(v1, bytes) :
                    v1 = v1.decode(charset)

                v = (v1, v2.decode(charset))
            return v

        def get_both(self, key, value, txn=None, flags=0) :
            if isinstance(key, str) :
                key = bytes(key, charset)
            if isinstance(value, str) :
                value = bytes(value, charset)
            v=self._db.get_both(key, value, txn=txn, flags=flags)
            if v is not None :
                v = v.decode(charset)
            return v

        def delete(self, key, txn=None) :
            if isinstance(key, str) :
                key = bytes(key, charset)
            return self._db.delete(key, txn=txn)

        def keys(self) :
            k = self._db.keys()
            if len(k) and isinstance(k[0], bytes) :
                return [i.decode(charset) for i in self._db.keys()]
            else :
                return k

        def items(self) :
            data = self._db.items()
            if not len(data) : return data
            data2 = []
            for k, v in data :
                if isinstance(k, bytes) :
                    k = k.decode(charset)
                data2.append((k, v.decode(charset)))
            return data2

        def associate(self, secondarydb, callback, flags=0, txn=None) :
            class associate_callback(object) :
                def __init__(self, callback) :
                    self._callback = callback

                def callback(self, key, data) :
                    if isinstance(key, str) :
                        key = key.decode(charset)
                    data = data.decode(charset)
                    key = self._callback(key, data)
                    if (key != bsddb._db.DB_DONOTINDEX) :
                        if isinstance(key, str) :
                            key = bytes(key, charset)
                        elif isinstance(key, list) :
                            key2 = []
                            for i in key :
                                if isinstance(i, str) :
                                    i = bytes(i, charset)
                                key2.append(i)
                            key = key2
                    return key

            return self._db.associate(secondarydb._db,
                    associate_callback(callback).callback, flags=flags,
                    txn=txn)

        def cursor(self, txn=None, flags=0) :
            return cursor_py3k(self._db, txn=txn, flags=flags)

        def join(self, cursor_list) :
            cursor_list = [i._dbcursor for i in cursor_list]
            return dup_cursor_py3k(self._db.join(cursor_list))

    class DBEnv_py3k(object) :
        def __init__(self, *args, **kwargs) :
            self._dbenv = bsddb._db.DBEnv_orig(*args, **kwargs)

        def __getattr__(self, v) :
            return getattr(self._dbenv, v)

        def log_cursor(self, flags=0) :
            return logcursor_py3k(self._dbenv)

        def get_lg_dir(self) :
            return self._dbenv.get_lg_dir().decode(charset)

        def get_tmp_dir(self) :
            return self._dbenv.get_tmp_dir().decode(charset)

        def get_data_dirs(self) :
            return tuple(
                (i.decode(charset) for i in self._dbenv.get_data_dirs()))

    class DBSequence_py3k(object) :
        def __init__(self, db, *args, **kwargs) :
            self._db=db
            self._dbsequence = bsddb._db.DBSequence_orig(db._db, *args, **kwargs)

        def __getattr__(self, v) :
            return getattr(self._dbsequence, v)

        def open(self, key, *args, **kwargs) :
            return self._dbsequence.open(bytes(key, charset), *args, **kwargs)

        def get_key(self) :
            return self._dbsequence.get_key().decode(charset)

        def get_dbp(self) :
            return self._db

    import string
    # note: this py3 branch assumed a 2to3 pass over the file (xrange, prints)
    string.letters=[chr(i) for i in xrange(65,91)]

    bsddb._db.DBEnv_orig = bsddb._db.DBEnv
    bsddb._db.DB_orig = bsddb._db.DB
    if bsddb.db.version() <= (4, 3) :
        bsddb._db.DBSequence_orig = None
    else :
        bsddb._db.DBSequence_orig = bsddb._db.DBSequence

    def do_proxy_db_py3k(flag) :
        flag2 = do_proxy_db_py3k.flag
        do_proxy_db_py3k.flag = flag
        if flag :
            bsddb.DBEnv = bsddb.db.DBEnv = bsddb._db.DBEnv = DBEnv_py3k
            bsddb.DB = bsddb.db.DB = bsddb._db.DB = DB_py3k
            bsddb._db.DBSequence = DBSequence_py3k
        else :
            bsddb.DBEnv = bsddb.db.DBEnv = bsddb._db.DBEnv = bsddb._db.DBEnv_orig
            bsddb.DB = bsddb.db.DB = bsddb._db.DB = bsddb._db.DB_orig
            bsddb._db.DBSequence = bsddb._db.DBSequence_orig
        return flag2

    do_proxy_db_py3k.flag = False
    do_proxy_db_py3k(True)
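Since do_proxy_db_py3k() returns the previous flag, a caller that needs the raw bytes-level classes can toggle the proxies off and restore them afterwards; a sketch:

    old = do_proxy_db_py3k(False)        # unhook the DB_py3k/DBEnv_py3k proxies
    try:
        raw = bsddb._db.DB()             # plain bytes API, no charset decoding
    finally:
        do_proxy_db_py3k(old)            # restore whatever was in effect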

try:
    # For Pythons w/distutils pybsddb
    from bsddb3 import db, dbtables, dbutils, dbshelve, \
            hashopen, btopen, rnopen, dbobj
except ImportError:
    # For Python 2.3
    from bsddb import db, dbtables, dbutils, dbshelve, \
            hashopen, btopen, rnopen, dbobj

try:
    from bsddb3 import test_support
except ImportError:
    if sys.version_info[0] < 3 :
        from test import test_support
    else :
        from test import support as test_support


try:
    if sys.version_info[0] < 3 :
        from threading import Thread, currentThread
        del Thread, currentThread
    else :
        from threading import Thread, current_thread
        del Thread, current_thread
    have_threads = True
except ImportError:
    have_threads = False

verbose = 0
if 'verbose' in sys.argv:
    verbose = 1
    sys.argv.remove('verbose')

if 'silent' in sys.argv:  # take care of old flag, just in case
    verbose = 0
    sys.argv.remove('silent')


def print_versions():
    print
    print '-=' * 38
    print db.DB_VERSION_STRING
    print 'bsddb.db.version():   %s' % (db.version(), )
    if db.version() >= (5, 0) :
        print 'bsddb.db.full_version(): %s' % repr(db.full_version())
    print 'bsddb.db.__version__: %s' % db.__version__
    print 'bsddb.db.cvsid:       %s' % db.cvsid

    # Workaround to allow generating EGGs as ZIP files.
    suffix="__"
    print 'py module:            %s' % getattr(bsddb, "__file"+suffix)
    print 'extension module:     %s' % getattr(bsddb, "__file"+suffix)

    print 'python version:       %s' % sys.version
    print 'My pid:               %s' % os.getpid()
    print '-=' * 38


def get_new_path(name) :
    get_new_path.mutex.acquire()
    try :
        import os
        path=os.path.join(get_new_path.prefix,
                name+"_"+str(os.getpid())+"_"+str(get_new_path.num))
        get_new_path.num+=1
    finally :
        get_new_path.mutex.release()
    return path

def get_new_environment_path() :
    path=get_new_path("environment")
    import os
    try:
        os.makedirs(path, mode=0700)
    except os.error:
        test_support.rmtree(path)
        os.makedirs(path)
    return path

def get_new_database_path() :
    path=get_new_path("database")
    import os
    if os.path.exists(path) :
        os.remove(path)
    return path


# This path can be overridden via "set_test_path_prefix()".
import os, os.path
get_new_path.prefix=os.path.join(os.environ.get("TMPDIR",
    os.path.join(os.sep, "tmp")), "z-Berkeley_DB")
get_new_path.num=0

def get_test_path_prefix() :
    return get_new_path.prefix

def set_test_path_prefix(path) :
    get_new_path.prefix=path

def remove_test_path_directory() :
    test_support.rmtree(get_new_path.prefix)

if have_threads :
    import threading
    get_new_path.mutex=threading.Lock()
    del threading
else :
    class Lock(object) :
        def acquire(self) :
            pass
        def release(self) :
            pass
    get_new_path.mutex=Lock()
    del Lock



class PrintInfoFakeTest(unittest.TestCase):
    def testPrintVersions(self):
        print_versions()


# This little hack is for when this module is run as main and all the
# other modules import it so they will still be able to get the right
# verbose setting.  It's confusing but it works.
if sys.version_info[0] < 3 :
    import test_all
    test_all.verbose = verbose
else :
    import sys
    print >>sys.stderr, "Work to do!"


def suite(module_prefix='', timing_check=None):
    test_modules = [
        'test_associate',
        'test_basics',
        'test_dbenv',
        'test_db',
        'test_compare',
        'test_compat',
        'test_cursor_pget_bug',
        'test_dbobj',
        'test_dbshelve',
        'test_dbtables',
        'test_distributed_transactions',
        'test_early_close',
        'test_fileid',
        'test_get_none',
        'test_join',
        'test_lock',
        'test_misc',
        'test_pickle',
        'test_queue',
        'test_recno',
        'test_replication',
        'test_sequence',
        'test_thread',
        ]

    alltests = unittest.TestSuite()
    for name in test_modules:
        #module = __import__(name)
        # Do it this way so that suite may be called externally via
        # python's Lib/test/test_bsddb3.
        module = __import__(module_prefix+name, globals(), locals(), name)

        alltests.addTest(module.test_suite())
        if timing_check:
            alltests.addTest(unittest.makeSuite(timing_check))
    return alltests


def test_suite():
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(PrintInfoFakeTest))
    return suite


if __name__ == '__main__':
    print_versions()
    unittest.main(defaultTest='suite')
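Because the verbose flag is consumed from sys.argv at import time, a programmatic driver sets it directly. A hypothetical sketch, assuming the module is importable as test_all:

    import unittest
    import test_all

    test_all.verbose = 1   # same switch the 'verbose' argv flag sets
    unittest.TextTestRunner(verbosity=2).run(test_all.suite())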
@ -1,465 +0,0 @@
"""
TestCases for DB.associate.
"""

import sys, os, string
import time
from pprint import pprint

import unittest
from test_all import db, dbshelve, test_support, verbose, have_threads, \
        get_new_environment_path

if have_threads :
    from threading import Thread    # used by ThreadedAssociateTestCase below


#----------------------------------------------------------------------


musicdata = {
    1 : ("Bad English", "The Price Of Love", "Rock"),
    2 : ("DNA featuring Suzanne Vega", "Tom's Diner", "Rock"),
    3 : ("George Michael", "Praying For Time", "Rock"),
    4 : ("Gloria Estefan", "Here We Are", "Rock"),
    5 : ("Linda Ronstadt", "Don't Know Much", "Rock"),
    6 : ("Michael Bolton", "How Am I Supposed To Live Without You", "Blues"),
    7 : ("Paul Young", "Oh Girl", "Rock"),
    8 : ("Paula Abdul", "Opposites Attract", "Rock"),
    9 : ("Richard Marx", "Should've Known Better", "Rock"),
    10: ("Rod Stewart", "Forever Young", "Rock"),
    11: ("Roxette", "Dangerous", "Rock"),
    12: ("Sheena Easton", "The Lover In Me", "Rock"),
    13: ("Sinead O'Connor", "Nothing Compares 2 U", "Rock"),
    14: ("Stevie B.", "Because I Love You", "Rock"),
    15: ("Taylor Dayne", "Love Will Lead You Back", "Rock"),
    16: ("The Bangles", "Eternal Flame", "Rock"),
    17: ("Wilson Phillips", "Release Me", "Rock"),
    18: ("Billy Joel", "Blonde Over Blue", "Rock"),
    19: ("Billy Joel", "Famous Last Words", "Rock"),
    20: ("Billy Joel", "Lullabye (Goodnight, My Angel)", "Rock"),
    21: ("Billy Joel", "The River Of Dreams", "Rock"),
    22: ("Billy Joel", "Two Thousand Years", "Rock"),
    23: ("Janet Jackson", "Alright", "Rock"),
    24: ("Janet Jackson", "Black Cat", "Rock"),
    25: ("Janet Jackson", "Come Back To Me", "Rock"),
    26: ("Janet Jackson", "Escapade", "Rock"),
    27: ("Janet Jackson", "Love Will Never Do (Without You)", "Rock"),
    28: ("Janet Jackson", "Miss You Much", "Rock"),
    29: ("Janet Jackson", "Rhythm Nation", "Rock"),
    30: ("Janet Jackson", "State Of The World", "Rock"),
    31: ("Janet Jackson", "The Knowledge", "Rock"),
    32: ("Spyro Gyra", "End of Romanticism", "Jazz"),
    33: ("Spyro Gyra", "Heliopolis", "Jazz"),
    34: ("Spyro Gyra", "Jubilee", "Jazz"),
    35: ("Spyro Gyra", "Little Linda", "Jazz"),
    36: ("Spyro Gyra", "Morning Dance", "Jazz"),
    37: ("Spyro Gyra", "Song for Lorraine", "Jazz"),
    38: ("Yes", "Owner Of A Lonely Heart", "Rock"),
    39: ("Yes", "Rhythm Of Love", "Rock"),
    40: ("Cusco", "Dream Catcher", "New Age"),
    41: ("Cusco", "Geronimos Laughter", "New Age"),
    42: ("Cusco", "Ghost Dance", "New Age"),
    43: ("Blue Man Group", "Drumbone", "New Age"),
    44: ("Blue Man Group", "Endless Column", "New Age"),
    45: ("Blue Man Group", "Klein Mandelbrot", "New Age"),
    46: ("Kenny G", "Silhouette", "Jazz"),
    47: ("Sade", "Smooth Operator", "Jazz"),
    48: ("David Arkenstone", "Papillon (On The Wings Of The Butterfly)",
         "New Age"),
    49: ("David Arkenstone", "Stepping Stars", "New Age"),
    50: ("David Arkenstone", "Carnation Lily Lily Rose", "New Age"),
    51: ("David Lanz", "Behind The Waterfall", "New Age"),
    52: ("David Lanz", "Cristofori's Dream", "New Age"),
    53: ("David Lanz", "Heartsounds", "New Age"),
    54: ("David Lanz", "Leaves on the Seine", "New Age"),
    99: ("unknown artist", "Unnamed song", "Unknown"),
}

#----------------------------------------------------------------------

class AssociateErrorTestCase(unittest.TestCase):
    def setUp(self):
        self.filename = self.__class__.__name__ + '.db'
        self.homeDir = get_new_environment_path()
        self.env = db.DBEnv()
        self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL)

    def tearDown(self):
        self.env.close()
        self.env = None
        test_support.rmtree(self.homeDir)

    def test00_associateDBError(self):
        if verbose:
            print '\n', '-=' * 30
            print "Running %s.test00_associateDBError..." % \
                  self.__class__.__name__

        dupDB = db.DB(self.env)
        dupDB.set_flags(db.DB_DUP)
        dupDB.open(self.filename, "primary", db.DB_BTREE, db.DB_CREATE)

        secDB = db.DB(self.env)
        secDB.open(self.filename, "secondary", db.DB_BTREE, db.DB_CREATE)

        # dupDB has been configured to allow duplicates, it can't
        # associate with a secondary.  Berkeley DB will return an error.
        try:
            def f(a,b): return a+b
            dupDB.associate(secDB, f)
        except db.DBError:
            # good
            secDB.close()
            dupDB.close()
        else:
            secDB.close()
            dupDB.close()
            self.fail("DBError exception was expected")



#----------------------------------------------------------------------


class AssociateTestCase(unittest.TestCase):
    keytype = ''
    envFlags = 0
    dbFlags = 0

    def setUp(self):
        self.filename = self.__class__.__name__ + '.db'
        self.homeDir = get_new_environment_path()
        self.env = db.DBEnv()
        self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL |
                           db.DB_INIT_LOCK | db.DB_THREAD | self.envFlags)

    def tearDown(self):
        self.closeDB()
        self.env.close()
        self.env = None
        test_support.rmtree(self.homeDir)

    def addDataToDB(self, d, txn=None):
        for key, value in musicdata.items():
            if type(self.keytype) == type(''):
                key = "%02d" % key
            d.put(key, '|'.join(value), txn=txn)

    def createDB(self, txn=None):
        self.cur = None
        self.secDB = None
        self.primary = db.DB(self.env)
        self.primary.set_get_returns_none(2)
        self.primary.open(self.filename, "primary", self.dbtype,
                          db.DB_CREATE | db.DB_THREAD | self.dbFlags, txn=txn)

    def closeDB(self):
        if self.cur:
            self.cur.close()
            self.cur = None
        if self.secDB:
            self.secDB.close()
            self.secDB = None
        self.primary.close()
        self.primary = None

    def getDB(self):
        return self.primary


    def _associateWithDB(self, getGenre):
        self.createDB()

        self.secDB = db.DB(self.env)
        self.secDB.set_flags(db.DB_DUP)
        self.secDB.set_get_returns_none(2)
        self.secDB.open(self.filename, "secondary", db.DB_BTREE,
                   db.DB_CREATE | db.DB_THREAD | self.dbFlags)
        self.getDB().associate(self.secDB, getGenre)

        self.addDataToDB(self.getDB())

        self.finish_test(self.secDB)

    def test01_associateWithDB(self):
        if verbose:
            print '\n', '-=' * 30
            print "Running %s.test01_associateWithDB..." % \
                  self.__class__.__name__

        return self._associateWithDB(self.getGenre)

    def _associateAfterDB(self, getGenre) :
        self.createDB()
        self.addDataToDB(self.getDB())

        self.secDB = db.DB(self.env)
        self.secDB.set_flags(db.DB_DUP)
        self.secDB.open(self.filename, "secondary", db.DB_BTREE,
                   db.DB_CREATE | db.DB_THREAD | self.dbFlags)

        # adding the DB_CREATE flag will cause it to index existing records
        self.getDB().associate(self.secDB, getGenre, db.DB_CREATE)

        self.finish_test(self.secDB)

    def test02_associateAfterDB(self):
        if verbose:
            print '\n', '-=' * 30
            print "Running %s.test02_associateAfterDB..." % \
                  self.__class__.__name__

        return self._associateAfterDB(self.getGenre)

    if db.version() >= (4, 6):
        def test03_associateWithDB(self):
            if verbose:
                print '\n', '-=' * 30
                print "Running %s.test03_associateWithDB..." % \
                      self.__class__.__name__

            return self._associateWithDB(self.getGenreList)

        def test04_associateAfterDB(self):
            if verbose:
                print '\n', '-=' * 30
                print "Running %s.test04_associateAfterDB..." % \
                      self.__class__.__name__

            return self._associateAfterDB(self.getGenreList)


    def finish_test(self, secDB, txn=None):
        # 'Blues' should not be in the secondary database
        vals = secDB.pget('Blues', txn=txn)
        self.assertEqual(vals, None, vals)

        vals = secDB.pget('Unknown', txn=txn)
        self.assertTrue(vals[0] == 99 or vals[0] == '99', vals)
        vals[1].index('Unknown')
        vals[1].index('Unnamed')
        vals[1].index('unknown')

        if verbose:
            print "Primary key traversal:"
        self.cur = self.getDB().cursor(txn)
        count = 0
        rec = self.cur.first()
        while rec is not None:
            if type(self.keytype) == type(''):
                self.assertTrue(int(rec[0]))  # for primary db, key is a number
            else:
                self.assertTrue(rec[0] and type(rec[0]) == type(0))
            count = count + 1
            if verbose:
                print rec
            rec = getattr(self.cur, "next")()
        self.assertEqual(count, len(musicdata))  # all items accounted for


        if verbose:
            print "Secondary key traversal:"
        self.cur = secDB.cursor(txn)
        count = 0

        # test cursor pget
        vals = self.cur.pget('Unknown', flags=db.DB_LAST)
        self.assertTrue(vals[1] == 99 or vals[1] == '99', vals)
        self.assertEqual(vals[0], 'Unknown')
        vals[2].index('Unknown')
        vals[2].index('Unnamed')
        vals[2].index('unknown')

        vals = self.cur.pget('Unknown', data='wrong value', flags=db.DB_GET_BOTH)
        self.assertEqual(vals, None, vals)

        rec = self.cur.first()
        self.assertEqual(rec[0], "Jazz")
        while rec is not None:
            count = count + 1
            if verbose:
                print rec
            rec = getattr(self.cur, "next")()
        # all items accounted for EXCEPT for 1 with "Blues" genre
        self.assertEqual(count, len(musicdata)-1)

        self.cur = None

    def getGenre(self, priKey, priData):
        self.assertEqual(type(priData), type(""))
        genre = priData.split('|')[2]

        if verbose:
            print 'getGenre key: %r data: %r' % (priKey, priData)

        if genre == 'Blues':
            return db.DB_DONOTINDEX
        else:
            return genre

    def getGenreList(self, priKey, PriData) :
        v = self.getGenre(priKey, PriData)
        if type(v) == type("") :
            v = [v]
        return v


#----------------------------------------------------------------------


class AssociateHashTestCase(AssociateTestCase):
    dbtype = db.DB_HASH

class AssociateBTreeTestCase(AssociateTestCase):
    dbtype = db.DB_BTREE

class AssociateRecnoTestCase(AssociateTestCase):
    dbtype = db.DB_RECNO
    keytype = 0

#----------------------------------------------------------------------

class AssociateBTreeTxnTestCase(AssociateBTreeTestCase):
    envFlags = db.DB_INIT_TXN
    dbFlags = 0

    def txn_finish_test(self, sDB, txn):
        try:
            self.finish_test(sDB, txn=txn)
        finally:
            if self.cur:
                self.cur.close()
                self.cur = None
            if txn:
                txn.commit()

    def test13_associate_in_transaction(self):
        if verbose:
            print '\n', '-=' * 30
            print "Running %s.test13_associate_in_transaction..." % \
                  self.__class__.__name__

        txn = self.env.txn_begin()
        try:
            self.createDB(txn=txn)

            self.secDB = db.DB(self.env)
            self.secDB.set_flags(db.DB_DUP)
            self.secDB.set_get_returns_none(2)
            self.secDB.open(self.filename, "secondary", db.DB_BTREE,
                       db.DB_CREATE | db.DB_THREAD, txn=txn)
            self.getDB().associate(self.secDB, self.getGenre, txn=txn)

            self.addDataToDB(self.getDB(), txn=txn)
        except:
            txn.abort()
            raise

        self.txn_finish_test(self.secDB, txn=txn)


#----------------------------------------------------------------------

class ShelveAssociateTestCase(AssociateTestCase):

    def createDB(self):
        self.primary = dbshelve.open(self.filename,
                                     dbname="primary",
                                     dbenv=self.env,
                                     filetype=self.dbtype)

    def addDataToDB(self, d):
        for key, value in musicdata.items():
            if type(self.keytype) == type(''):
                key = "%02d" % key
            d.put(key, value)    # save the value as is this time


    def getGenre(self, priKey, priData):
        self.assertEqual(type(priData), type(()))
        if verbose:
            print 'getGenre key: %r data: %r' % (priKey, priData)
        genre = priData[2]
        if genre == 'Blues':
            return db.DB_DONOTINDEX
        else:
            return genre


class ShelveAssociateHashTestCase(ShelveAssociateTestCase):
    dbtype = db.DB_HASH

class ShelveAssociateBTreeTestCase(ShelveAssociateTestCase):
    dbtype = db.DB_BTREE

class ShelveAssociateRecnoTestCase(ShelveAssociateTestCase):
    dbtype = db.DB_RECNO
    keytype = 0


#----------------------------------------------------------------------

class ThreadedAssociateTestCase(AssociateTestCase):

    def addDataToDB(self, d):
        t1 = Thread(target = self.writer1,
                    args = (d, ))
        t2 = Thread(target = self.writer2,
                    args = (d, ))

        t1.setDaemon(True)
        t2.setDaemon(True)
        t1.start()
        t2.start()
        t1.join()
        t2.join()

    def writer1(self, d):
        for key, value in musicdata.items():
            if type(self.keytype) == type(''):
                key = "%02d" % key
            d.put(key, '|'.join(value))

    def writer2(self, d):
        for x in range(100, 600):
            key = 'z%2d' % x
            value = [key] * 4
            d.put(key, '|'.join(value))


class ThreadedAssociateHashTestCase(ShelveAssociateTestCase):
    dbtype = db.DB_HASH

class ThreadedAssociateBTreeTestCase(ShelveAssociateTestCase):
    dbtype = db.DB_BTREE

class ThreadedAssociateRecnoTestCase(ShelveAssociateTestCase):
    dbtype = db.DB_RECNO
    keytype = 0


#----------------------------------------------------------------------

def test_suite():
    suite = unittest.TestSuite()

    suite.addTest(unittest.makeSuite(AssociateErrorTestCase))

    suite.addTest(unittest.makeSuite(AssociateHashTestCase))
    suite.addTest(unittest.makeSuite(AssociateBTreeTestCase))
    suite.addTest(unittest.makeSuite(AssociateRecnoTestCase))

    suite.addTest(unittest.makeSuite(AssociateBTreeTxnTestCase))

    suite.addTest(unittest.makeSuite(ShelveAssociateHashTestCase))
    suite.addTest(unittest.makeSuite(ShelveAssociateBTreeTestCase))
    suite.addTest(unittest.makeSuite(ShelveAssociateRecnoTestCase))

    if have_threads:
        suite.addTest(unittest.makeSuite(ThreadedAssociateHashTestCase))
        suite.addTest(unittest.makeSuite(ThreadedAssociateBTreeTestCase))
        suite.addTest(unittest.makeSuite(ThreadedAssociateRecnoTestCase))

    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
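Condensed from the tests above into a standalone sketch, the secondary-index pattern they exercise: associate() keeps the 'genres' index in sync with the primary, and returning db.DB_DONOTINDEX from the callback keeps a record out of it. The environment path and file names are illustrative and the home directory is assumed to exist:

    from bsddb import db

    env = db.DBEnv()
    env.open('/tmp/assoc-home', db.DB_CREATE | db.DB_INIT_MPOOL)

    primary = db.DB(env)
    primary.open('music.db', 'primary', db.DB_BTREE, db.DB_CREATE)

    genres = db.DB(env)
    genres.set_flags(db.DB_DUP)              # many songs share one genre
    genres.open('music.db', 'genres', db.DB_BTREE, db.DB_CREATE)

    def by_genre(key, data):
        genre = data.split('|')[2]
        if genre == 'Blues':
            return db.DB_DONOTINDEX
        return genre

    primary.associate(genres, by_genre)
    primary.put('01', 'Spyro Gyra|Morning Dance|Jazz')
    print genres.pget('Jazz')                # ('01', 'Spyro Gyra|Morning Dance|Jazz')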
File diff suppressed because it is too large
@ -1,447 +0,0 @@
|
|||
"""
|
||||
TestCases for python DB duplicate and Btree key comparison function.
|
||||
"""
|
||||
|
||||
import sys, os, re
|
||||
import test_all
|
||||
from cStringIO import StringIO
|
||||
|
||||
import unittest
|
||||
|
||||
from test_all import db, dbshelve, test_support, \
|
||||
get_new_environment_path, get_new_database_path
|
||||
|
||||
|
||||
# Needed for python 3. "cmp" vanished in 3.0.1
|
||||
def cmp(a, b) :
|
||||
if a==b : return 0
|
||||
if a<b : return -1
|
||||
return 1
|
||||
|
||||
lexical_cmp = cmp
|
||||
|
||||
def lowercase_cmp(left, right) :
|
||||
return cmp(left.lower(), right.lower())
|
||||
|
||||
def make_reverse_comparator(cmp) :
|
||||
def reverse(left, right, delegate=cmp) :
|
||||
return - delegate(left, right)
|
||||
return reverse
|
||||
|
||||
_expected_lexical_test_data = ['', 'CCCP', 'a', 'aaa', 'b', 'c', 'cccce', 'ccccf']
|
||||
_expected_lowercase_test_data = ['', 'a', 'aaa', 'b', 'c', 'CC', 'cccce', 'ccccf', 'CCCP']
|
||||
|
||||
class ComparatorTests(unittest.TestCase) :
|
||||
def comparator_test_helper(self, comparator, expected_data) :
|
||||
data = expected_data[:]
|
||||
|
||||
import sys
|
||||
if sys.version_info < (2, 6) :
|
||||
data.sort(cmp=comparator)
|
||||
else : # Insertion Sort. Please, improve
|
||||
data2 = []
|
||||
for i in data :
|
||||
for j, k in enumerate(data2) :
|
||||
r = comparator(k, i)
|
||||
if r == 1 :
|
||||
data2.insert(j, i)
|
||||
break
|
||||
else :
|
||||
data2.append(i)
|
||||
data = data2
|
||||
|
||||
self.assertEqual(data, expected_data,
|
||||
"comparator `%s' is not right: %s vs. %s"
|
||||
% (comparator, expected_data, data))
|
||||
def test_lexical_comparator(self) :
|
||||
self.comparator_test_helper(lexical_cmp, _expected_lexical_test_data)
|
||||
def test_reverse_lexical_comparator(self) :
|
||||
rev = _expected_lexical_test_data[:]
|
||||
rev.reverse()
|
||||
self.comparator_test_helper(make_reverse_comparator(lexical_cmp),
|
||||
rev)
|
||||
def test_lowercase_comparator(self) :
|
||||
self.comparator_test_helper(lowercase_cmp,
|
||||
_expected_lowercase_test_data)
|
||||
|
||||
class AbstractBtreeKeyCompareTestCase(unittest.TestCase) :
|
||||
env = None
|
||||
db = None
|
||||
|
||||
if (sys.version_info < (2, 7)) or ((sys.version_info >= (3,0)) and
|
||||
(sys.version_info < (3, 2))) :
|
||||
def assertLess(self, a, b, msg=None) :
|
||||
return self.assertTrue(a<b, msg=msg)
|
||||
|
||||
def setUp(self) :
|
||||
self.filename = self.__class__.__name__ + '.db'
|
||||
self.homeDir = get_new_environment_path()
|
||||
env = db.DBEnv()
|
||||
env.open(self.homeDir,
|
||||
db.DB_CREATE | db.DB_INIT_MPOOL
|
||||
| db.DB_INIT_LOCK | db.DB_THREAD)
|
||||
self.env = env
|
||||
|
||||
def tearDown(self) :
|
||||
self.closeDB()
|
||||
if self.env is not None:
|
||||
self.env.close()
|
||||
self.env = None
|
||||
test_support.rmtree(self.homeDir)
|
||||
|
||||
def addDataToDB(self, data) :
|
||||
i = 0
|
||||
for item in data:
|
||||
self.db.put(item, str(i))
|
||||
i = i + 1
|
||||
|
||||
def createDB(self, key_comparator) :
|
||||
self.db = db.DB(self.env)
|
||||
self.setupDB(key_comparator)
|
||||
self.db.open(self.filename, "test", db.DB_BTREE, db.DB_CREATE)
|
||||
|
||||
def setupDB(self, key_comparator) :
|
||||
self.db.set_bt_compare(key_comparator)
|
||||
|
||||
def closeDB(self) :
|
||||
if self.db is not None:
|
||||
self.db.close()
|
||||
self.db = None
|
||||
|
||||
def startTest(self) :
|
||||
pass
|
||||
|
||||
def finishTest(self, expected = None) :
|
||||
if expected is not None:
|
||||
self.check_results(expected)
|
||||
self.closeDB()
|
||||
|
||||
def check_results(self, expected) :
|
||||
curs = self.db.cursor()
|
||||
try:
|
||||
index = 0
|
||||
rec = curs.first()
|
||||
while rec:
|
||||
key, ignore = rec
|
||||
self.assertLess(index, len(expected),
|
||||
"to many values returned from cursor")
|
||||
self.assertEqual(expected[index], key,
|
||||
"expected value `%s' at %d but got `%s'"
|
||||
% (expected[index], index, key))
|
||||
index = index + 1
|
||||
rec = curs.next()
|
||||
self.assertEqual(index, len(expected),
|
||||
"not enough values returned from cursor")
|
||||
finally:
|
||||
curs.close()
|
||||
|
||||
class BtreeKeyCompareTestCase(AbstractBtreeKeyCompareTestCase) :
|
||||
def runCompareTest(self, comparator, data) :
|
||||
self.startTest()
|
||||
self.createDB(comparator)
|
||||
self.addDataToDB(data)
|
||||
self.finishTest(data)
|
||||
|
||||
def test_lexical_ordering(self) :
|
||||
self.runCompareTest(lexical_cmp, _expected_lexical_test_data)
|
||||
|
||||
def test_reverse_lexical_ordering(self) :
|
||||
expected_rev_data = _expected_lexical_test_data[:]
|
||||
expected_rev_data.reverse()
|
||||
self.runCompareTest(make_reverse_comparator(lexical_cmp),
|
||||
expected_rev_data)
|
||||
|
||||
def test_compare_function_useless(self) :
|
||||
self.startTest()
|
||||
def socialist_comparator(l, r) :
|
||||
return 0
|
||||
self.createDB(socialist_comparator)
|
||||
self.addDataToDB(['b', 'a', 'd'])
|
||||
# all things being equal the first key will be the only key
|
||||
# in the database... (with the last key's value fwiw)
|
||||
self.finishTest(['b'])
|
||||
|
||||
|
||||
class BtreeExceptionsTestCase(AbstractBtreeKeyCompareTestCase) :
|
||||
def test_raises_non_callable(self) :
|
||||
self.startTest()
|
||||
self.assertRaises(TypeError, self.createDB, 'abc')
|
||||
self.assertRaises(TypeError, self.createDB, None)
|
||||
self.finishTest()
|
||||
|
||||
def test_set_bt_compare_with_function(self) :
|
||||
self.startTest()
|
||||
self.createDB(lexical_cmp)
|
||||
self.finishTest()
|
||||
|
||||
def check_results(self, results) :
|
||||
pass
|
||||
|
||||
def test_compare_function_incorrect(self) :
|
||||
self.startTest()
|
||||
def bad_comparator(l, r) :
|
||||
return 1
|
||||
# verify that set_bt_compare checks that comparator('', '') == 0
|
||||
self.assertRaises(TypeError, self.createDB, bad_comparator)
|
||||
self.finishTest()
|
||||
|
||||
def verifyStderr(self, method, successRe) :
|
||||
"""
|
||||
Call method() while capturing sys.stderr output internally and
|
||||
call self.fail() if successRe.search() does not match the stderr
|
||||
output. This is used to test for uncatchable exceptions.
|
||||
"""
|
||||
stdErr = sys.stderr
|
||||
sys.stderr = StringIO()
|
||||
try:
|
||||
method()
|
||||
finally:
|
||||
temp = sys.stderr
|
||||
sys.stderr = stdErr
|
||||
errorOut = temp.getvalue()
|
||||
if not successRe.search(errorOut) :
|
||||
self.fail("unexpected stderr output:\n"+errorOut)
|
||||
if sys.version_info < (3, 0) : # XXX: How to do this in Py3k ???
|
||||
sys.exc_traceback = sys.last_traceback = None
|
||||
|
||||
def _test_compare_function_exception(self) :
|
||||
self.startTest()
|
||||
def bad_comparator(l, r) :
|
||||
if l == r:
|
||||
# pass the set_bt_compare test
|
||||
return 0
|
||||
raise RuntimeError, "i'm a naughty comparison function"
|
||||
self.createDB(bad_comparator)
|
||||
#print "\n*** test should print 2 uncatchable tracebacks ***"
|
||||
self.addDataToDB(['a', 'b', 'c']) # this should raise, but...
|
||||
self.finishTest()
|
||||
|
||||
def test_compare_function_exception(self) :
|
||||
self.verifyStderr(
|
||||
self._test_compare_function_exception,
|
||||
re.compile('(^RuntimeError:.* naughty.*){2}', re.M|re.S)
|
||||
)
|
||||
|
||||
def _test_compare_function_bad_return(self) :
|
||||
self.startTest()
|
||||
def bad_comparator(l, r) :
|
||||
if l == r:
|
||||
# pass the set_bt_compare test
|
||||
return 0
|
||||
return l
|
||||
self.createDB(bad_comparator)
|
||||
#print "\n*** test should print 2 errors about returning an int ***"
|
||||
self.addDataToDB(['a', 'b', 'c']) # this should raise, but...
|
||||
self.finishTest()
|
||||
|
||||
def test_compare_function_bad_return(self) :
|
||||
self.verifyStderr(
|
||||
self._test_compare_function_bad_return,
|
||||
re.compile('(^TypeError:.* return an int.*){2}', re.M|re.S)
|
||||
)
|
||||
|
||||
|
||||
def test_cannot_assign_twice(self) :
|
||||
|
||||
def my_compare(a, b) :
|
||||
return 0
|
||||
|
||||
self.startTest()
|
||||
self.createDB(my_compare)
|
||||
self.assertRaises(RuntimeError, self.db.set_bt_compare, my_compare)
|
||||
|
||||
class AbstractDuplicateCompareTestCase(unittest.TestCase) :
|
||||
env = None
|
||||
db = None
|
||||
|
||||
if (sys.version_info < (2, 7)) or ((sys.version_info >= (3,0)) and
|
||||
(sys.version_info < (3, 2))) :
|
||||
def assertLess(self, a, b, msg=None) :
|
||||
return self.assertTrue(a<b, msg=msg)
|
||||
|
||||
def setUp(self) :
|
||||
self.filename = self.__class__.__name__ + '.db'
|
||||
self.homeDir = get_new_environment_path()
|
||||
env = db.DBEnv()
|
||||
env.open(self.homeDir,
|
||||
db.DB_CREATE | db.DB_INIT_MPOOL
|
||||
| db.DB_INIT_LOCK | db.DB_THREAD)
|
||||
self.env = env
|
||||
|
||||
def tearDown(self) :
|
||||
self.closeDB()
|
||||
if self.env is not None:
|
||||
self.env.close()
|
||||
self.env = None
|
||||
test_support.rmtree(self.homeDir)
|
||||
|
||||
def addDataToDB(self, data) :
|
||||
for item in data:
|
||||
self.db.put("key", item)
|
||||
|
||||
def createDB(self, dup_comparator) :
|
||||
self.db = db.DB(self.env)
|
||||
self.setupDB(dup_comparator)
|
||||
self.db.open(self.filename, "test", db.DB_BTREE, db.DB_CREATE)
|
||||
|
||||
def setupDB(self, dup_comparator) :
|
||||
self.db.set_flags(db.DB_DUPSORT)
|
||||
self.db.set_dup_compare(dup_comparator)
|
||||
|
||||
def closeDB(self) :
|
||||
if self.db is not None:
|
||||
self.db.close()
|
||||
self.db = None
|
||||
|
||||
def startTest(self) :
|
||||
pass
|
||||
|
||||
def finishTest(self, expected = None) :
|
||||
if expected is not None:
|
||||
self.check_results(expected)
|
||||
self.closeDB()
|
||||
|
||||
def check_results(self, expected) :
|
||||
curs = self.db.cursor()
|
||||
try:
|
||||
index = 0
|
||||
rec = curs.first()
|
||||
while rec:
|
||||
ignore, data = rec
|
||||
self.assertLess(index, len(expected),
|
||||
"to many values returned from cursor")
|
||||
self.assertEqual(expected[index], data,
|
||||
"expected value `%s' at %d but got `%s'"
|
||||
% (expected[index], index, data))
|
||||
index = index + 1
|
||||
rec = curs.next()
|
||||
self.assertEqual(index, len(expected),
|
||||
"not enough values returned from cursor")
|
||||
finally:
|
||||
curs.close()


class DuplicateCompareTestCase(AbstractDuplicateCompareTestCase) :
    def runCompareTest(self, comparator, data) :
        self.startTest()
        self.createDB(comparator)
        self.addDataToDB(data)
        self.finishTest(data)

    def test_lexical_ordering(self) :
        self.runCompareTest(lexical_cmp, _expected_lexical_test_data)

    def test_reverse_lexical_ordering(self) :
        expected_rev_data = _expected_lexical_test_data[:]
        expected_rev_data.reverse()
        self.runCompareTest(make_reverse_comparator(lexical_cmp),
                            expected_rev_data)


class DuplicateExceptionsTestCase(AbstractDuplicateCompareTestCase) :
    def test_raises_non_callable(self) :
        self.startTest()
        self.assertRaises(TypeError, self.createDB, 'abc')
        self.assertRaises(TypeError, self.createDB, None)
        self.finishTest()

    def test_set_dup_compare_with_function(self) :
        self.startTest()
        self.createDB(lexical_cmp)
        self.finishTest()

    def check_results(self, results) :
        pass

    def test_compare_function_incorrect(self) :
        self.startTest()
        def bad_comparator(l, r) :
            return 1
        # verify that set_dup_compare checks that comparator('', '') == 0
        self.assertRaises(TypeError, self.createDB, bad_comparator)
        self.finishTest()

    def test_compare_function_useless(self) :
        self.startTest()
        def socialist_comparator(l, r) :
            return 0
        self.createDB(socialist_comparator)
        # DUPSORT does not allow "duplicate duplicates"
        self.assertRaises(db.DBKeyExistError, self.addDataToDB, ['b', 'a', 'd'])
        self.finishTest()

    def verifyStderr(self, method, successRe) :
        """
        Call method() while capturing sys.stderr output internally and
        call self.fail() if successRe.search() does not match the stderr
        output.  This is used to test for uncatchable exceptions.
        """
        stdErr = sys.stderr
        sys.stderr = StringIO()
        try:
            method()
        finally:
            temp = sys.stderr
            sys.stderr = stdErr
            errorOut = temp.getvalue()
            if not successRe.search(errorOut) :
                self.fail("unexpected stderr output:\n" + errorOut)
        if sys.version_info < (3, 0) :  # XXX: How to do this in Py3k ???
            sys.exc_traceback = sys.last_traceback = None

    def _test_compare_function_exception(self) :
        self.startTest()
        def bad_comparator(l, r) :
            if l == r:
                # pass the set_dup_compare test
                return 0
            raise RuntimeError, "i'm a naughty comparison function"
        self.createDB(bad_comparator)
        #print "\n*** test should print 2 uncatchable tracebacks ***"
        self.addDataToDB(['a', 'b', 'c'])  # this should raise, but...
        self.finishTest()

    def test_compare_function_exception(self) :
        self.verifyStderr(
                self._test_compare_function_exception,
                re.compile('(^RuntimeError:.* naughty.*){2}', re.M|re.S)
        )

    def _test_compare_function_bad_return(self) :
        self.startTest()
        def bad_comparator(l, r) :
            if l == r:
                # pass the set_dup_compare test
                return 0
            return l
        self.createDB(bad_comparator)
        #print "\n*** test should print 2 errors about returning an int ***"
        self.addDataToDB(['a', 'b', 'c'])  # this should raise, but...
        self.finishTest()

    def test_compare_function_bad_return(self) :
        self.verifyStderr(
                self._test_compare_function_bad_return,
                re.compile('(^TypeError:.* return an int.*){2}', re.M|re.S)
        )


    def test_cannot_assign_twice(self) :

        def my_compare(a, b) :
            return 0

        self.startTest()
        self.createDB(my_compare)
        self.assertRaises(RuntimeError, self.db.set_dup_compare, my_compare)


def test_suite() :
    res = unittest.TestSuite()

    res.addTest(unittest.makeSuite(ComparatorTests))
    res.addTest(unittest.makeSuite(BtreeExceptionsTestCase))
    res.addTest(unittest.makeSuite(BtreeKeyCompareTestCase))
    res.addTest(unittest.makeSuite(DuplicateExceptionsTestCase))
    res.addTest(unittest.makeSuite(DuplicateCompareTestCase))
    return res


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
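Editor's note: the verifyStderr helper above swaps sys.stderr by hand and carries an "XXX: How to do this in Py3k ???" comment. On Python 3.4 and later the same capture can be written with contextlib.redirect_stderr. The sketch below is an illustration of that replacement, not part of the committed code; the function name is ours.

# A minimal Python 3 sketch of the verifyStderr idea (assumes Python 3.4+
# for contextlib.redirect_stderr; verify_stderr is a hypothetical name).
import contextlib
import io

def verify_stderr(method, success_re):
    buf = io.StringIO()
    with contextlib.redirect_stderr(buf):  # capture everything written to sys.stderr
        method()
    output = buf.getvalue()
    if not success_re.search(output):
        raise AssertionError("unexpected stderr output:\n" + output)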
@@ -1,184 +0,0 @@
"""
|
||||
Test cases adapted from the test_bsddb.py module in Python's
|
||||
regression test suite.
|
||||
"""
|
||||
|
||||
import os, string
|
||||
import unittest
|
||||
|
||||
from test_all import db, hashopen, btopen, rnopen, verbose, \
|
||||
get_new_database_path
|
||||
|
||||
|
||||
class CompatibilityTestCase(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.filename = get_new_database_path()
|
||||
|
||||
def tearDown(self):
|
||||
try:
|
||||
os.remove(self.filename)
|
||||
except os.error:
|
||||
pass
|
||||
|
||||
|
||||
def test01_btopen(self):
|
||||
self.do_bthash_test(btopen, 'btopen')
|
||||
|
||||
def test02_hashopen(self):
|
||||
self.do_bthash_test(hashopen, 'hashopen')
|
||||
|
||||
def test03_rnopen(self):
|
||||
data = "The quick brown fox jumped over the lazy dog.".split()
|
||||
if verbose:
|
||||
print "\nTesting: rnopen"
|
||||
|
||||
f = rnopen(self.filename, 'c')
|
||||
for x in range(len(data)):
|
||||
f[x+1] = data[x]
|
||||
|
||||
getTest = (f[1], f[2], f[3])
|
||||
if verbose:
|
||||
print '%s %s %s' % getTest
|
||||
|
||||
self.assertEqual(getTest[1], 'quick', 'data mismatch!')
|
||||
|
||||
rv = f.set_location(3)
|
||||
if rv != (3, 'brown'):
|
||||
self.fail('recno database set_location failed: '+repr(rv))
|
||||
|
||||
f[25] = 'twenty-five'
|
||||
f.close()
|
||||
del f
|
||||
|
||||
f = rnopen(self.filename, 'w')
|
||||
f[20] = 'twenty'
|
||||
|
||||
def noRec(f):
|
||||
rec = f[15]
|
||||
self.assertRaises(KeyError, noRec, f)
|
||||
|
||||
def badKey(f):
|
||||
rec = f['a string']
|
||||
self.assertRaises(TypeError, badKey, f)
|
||||
|
||||
del f[3]
|
||||
|
||||
rec = f.first()
|
||||
while rec:
|
||||
if verbose:
|
||||
print rec
|
||||
try:
|
||||
rec = f.next()
|
||||
except KeyError:
|
||||
break
|
||||
|
||||
f.close()
|
||||
|
||||
|
||||
def test04_n_flag(self):
|
||||
f = hashopen(self.filename, 'n')
|
||||
f.close()
|
||||
|
||||
|
||||
def do_bthash_test(self, factory, what):
|
||||
if verbose:
|
||||
print '\nTesting: ', what
|
||||
|
||||
f = factory(self.filename, 'c')
|
||||
if verbose:
|
||||
print 'creation...'
|
||||
|
||||
# truth test
|
||||
if f:
|
||||
if verbose: print "truth test: true"
|
||||
else:
|
||||
if verbose: print "truth test: false"
|
||||
|
||||
f['0'] = ''
|
||||
f['a'] = 'Guido'
|
||||
f['b'] = 'van'
|
||||
f['c'] = 'Rossum'
|
||||
f['d'] = 'invented'
|
||||
# 'e' intentionally left out
|
||||
f['f'] = 'Python'
|
||||
if verbose:
|
||||
print '%s %s %s' % (f['a'], f['b'], f['c'])
|
||||
|
||||
if verbose:
|
||||
print 'key ordering...'
|
||||
start = f.set_location(f.first()[0])
|
||||
if start != ('0', ''):
|
||||
self.fail("incorrect first() result: "+repr(start))
|
||||
while 1:
|
||||
try:
|
||||
rec = f.next()
|
||||
except KeyError:
|
||||
self.assertEqual(rec, f.last(), 'Error, last <> last!')
|
||||
f.previous()
|
||||
break
|
||||
if verbose:
|
||||
print rec
|
||||
|
||||
self.assertTrue(f.has_key('f'), 'Error, missing key!')
|
||||
|
||||
# test that set_location() returns the next nearest key, value
|
||||
# on btree databases and raises KeyError on others.
|
||||
if factory == btopen:
|
||||
e = f.set_location('e')
|
||||
if e != ('f', 'Python'):
|
||||
self.fail('wrong key,value returned: '+repr(e))
|
||||
else:
|
||||
try:
|
||||
e = f.set_location('e')
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
self.fail("set_location on non-existent key did not raise KeyError")
|
||||
|
||||
f.sync()
|
||||
f.close()
|
||||
# truth test
|
||||
try:
|
||||
if f:
|
||||
if verbose: print "truth test: true"
|
||||
else:
|
||||
if verbose: print "truth test: false"
|
||||
except db.DBError:
|
||||
pass
|
||||
else:
|
||||
self.fail("Exception expected")
|
||||
|
||||
del f
|
||||
|
||||
if verbose:
|
||||
print 'modification...'
|
||||
f = factory(self.filename, 'w')
|
||||
f['d'] = 'discovered'
|
||||
|
||||
if verbose:
|
||||
print 'access...'
|
||||
for key in f.keys():
|
||||
word = f[key]
|
||||
if verbose:
|
||||
print word
|
||||
|
||||
def noRec(f):
|
||||
rec = f['no such key']
|
||||
self.assertRaises(KeyError, noRec, f)
|
||||
|
||||
def badKey(f):
|
||||
rec = f[15]
|
||||
self.assertRaises(TypeError, badKey, f)
|
||||
|
||||
f.close()
|
||||
|
||||
|
||||
#----------------------------------------------------------------------
|
||||
|
||||
|
||||
def test_suite():
|
||||
return unittest.makeSuite(CompatibilityTestCase)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main(defaultTest='test_suite')
|
||||
|
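Editor's note: this file leans on two Python 2 idioms that Python 3 removed, the print statement and dict.has_key(). The snippet below shows the Python 3 equivalents using a plain dict as a stand-in for the database handle (an assumption; bsddb itself is not in the Python 3 standard library).

# Python 2 -> 3 idiom sketch, illustration only.
f = {'f': 'Python'}
print('truth test:', bool(f))   # the print statement becomes the print() function
assert 'f' in f                 # d.has_key(k) becomes: k in d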
@@ -1,54 +0,0 @@
import unittest
import os, glob

from test_all import db, test_support, get_new_environment_path, \
        get_new_database_path

#----------------------------------------------------------------------

class pget_bugTestCase(unittest.TestCase):
    """Verify that cursor.pget works properly"""
    db_name = 'test-cursor_pget.db'

    def setUp(self):
        self.homeDir = get_new_environment_path()
        self.env = db.DBEnv()
        self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL)
        self.primary_db = db.DB(self.env)
        self.primary_db.open(self.db_name, 'primary', db.DB_BTREE, db.DB_CREATE)
        self.secondary_db = db.DB(self.env)
        self.secondary_db.set_flags(db.DB_DUP)
        self.secondary_db.open(self.db_name, 'secondary', db.DB_BTREE, db.DB_CREATE)
        self.primary_db.associate(self.secondary_db, lambda key, data: data)
        self.primary_db.put('salad', 'eggs')
        self.primary_db.put('spam', 'ham')
        self.primary_db.put('omelet', 'eggs')


    def tearDown(self):
        self.secondary_db.close()
        self.primary_db.close()
        self.env.close()
        del self.secondary_db
        del self.primary_db
        del self.env
        test_support.rmtree(self.homeDir)

    def test_pget(self):
        cursor = self.secondary_db.cursor()

        self.assertEqual(('eggs', 'salad', 'eggs'), cursor.pget(key='eggs', flags=db.DB_SET))
        self.assertEqual(('eggs', 'omelet', 'eggs'), cursor.pget(db.DB_NEXT_DUP))
        self.assertEqual(None, cursor.pget(db.DB_NEXT_DUP))

        self.assertEqual(('ham', 'spam', 'ham'), cursor.pget('ham', 'spam', flags=db.DB_SET))
        self.assertEqual(None, cursor.pget(db.DB_NEXT_DUP))

        cursor.close()


def test_suite():
    return unittest.makeSuite(pget_bugTestCase)

if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
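Editor's note: the associate() callback above builds a secondary index whose keys are the primary records' data, and pget() then returns (secondary key, primary key, data) triples. The plain-dict sketch below models that mapping for illustration only; the real bookkeeping happens inside Berkeley DB.

# Dict model of the secondary index used by test_pget, illustration only.
primary = {'salad': 'eggs', 'spam': 'ham', 'omelet': 'eggs'}
secondary = {}
for pkey, data in primary.items():
    secondary.setdefault(data, []).append(pkey)   # same rule as the callback: key = data

def pget(skey):
    # emulate the (secondary key, primary key, data) triples pget returns
    return [(skey, pkey, primary[pkey]) for pkey in sorted(secondary.get(skey, []))]

assert pget('eggs') == [('eggs', 'omelet', 'eggs'), ('eggs', 'salad', 'eggs')]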
@@ -1,163 +0,0 @@
import unittest
import os, glob

from test_all import db, test_support, get_new_environment_path, \
        get_new_database_path

#----------------------------------------------------------------------

class DB(unittest.TestCase):
    def setUp(self):
        self.path = get_new_database_path()
        self.db = db.DB()

    def tearDown(self):
        self.db.close()
        del self.db
        test_support.unlink(self.path)

class DB_general(DB) :
    def test_get_open_flags(self) :
        self.db.open(self.path, dbtype=db.DB_HASH, flags = db.DB_CREATE)
        self.assertEqual(db.DB_CREATE, self.db.get_open_flags())

    def test_get_open_flags2(self) :
        self.db.open(self.path, dbtype=db.DB_HASH, flags = db.DB_CREATE |
                db.DB_THREAD)
        self.assertEqual(db.DB_CREATE | db.DB_THREAD, self.db.get_open_flags())

    def test_get_dbname_filename(self) :
        self.db.open(self.path, dbtype=db.DB_HASH, flags = db.DB_CREATE)
        self.assertEqual((self.path, None), self.db.get_dbname())

    def test_get_dbname_filename_database(self) :
        name = "jcea-random-name"
        self.db.open(self.path, dbname=name, dbtype=db.DB_HASH,
                flags = db.DB_CREATE)
        self.assertEqual((self.path, name), self.db.get_dbname())

    def test_bt_minkey(self) :
        for i in [17, 108, 1030] :
            self.db.set_bt_minkey(i)
            self.assertEqual(i, self.db.get_bt_minkey())

    def test_lorder(self) :
        self.db.set_lorder(1234)
        self.assertEqual(1234, self.db.get_lorder())
        self.db.set_lorder(4321)
        self.assertEqual(4321, self.db.get_lorder())
        self.assertRaises(db.DBInvalidArgError, self.db.set_lorder, 9182)

    if db.version() >= (4, 6) :
        def test_priority(self) :
            flags = [db.DB_PRIORITY_VERY_LOW, db.DB_PRIORITY_LOW,
                    db.DB_PRIORITY_DEFAULT, db.DB_PRIORITY_HIGH,
                    db.DB_PRIORITY_VERY_HIGH]
            for flag in flags :
                self.db.set_priority(flag)
                self.assertEqual(flag, self.db.get_priority())

    def test_get_transactional(self) :
        self.assertFalse(self.db.get_transactional())
        self.db.open(self.path, dbtype=db.DB_HASH, flags = db.DB_CREATE)
        self.assertFalse(self.db.get_transactional())

class DB_hash(DB) :
    def test_h_ffactor(self) :
        for ffactor in [4, 16, 256] :
            self.db.set_h_ffactor(ffactor)
            self.assertEqual(ffactor, self.db.get_h_ffactor())

    def test_h_nelem(self) :
        for nelem in [1, 2, 4] :
            nelem = nelem*1024*1024  # Millions
            self.db.set_h_nelem(nelem)
            self.assertEqual(nelem, self.db.get_h_nelem())

    def test_pagesize(self) :
        for i in xrange(9, 17) :  # From 512 to 65536
            i = 1<<i
            self.db.set_pagesize(i)
            self.assertEqual(i, self.db.get_pagesize())

        # The valid values go from 512 to 65536.
        # Test 131072 bytes...
        self.assertRaises(db.DBInvalidArgError, self.db.set_pagesize, 1<<17)
        # Test 256 bytes...
        self.assertRaises(db.DBInvalidArgError, self.db.set_pagesize, 1<<8)

class DB_txn(DB) :
    def setUp(self) :
        self.homeDir = get_new_environment_path()
        self.env = db.DBEnv()
        self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL |
                db.DB_INIT_LOG | db.DB_INIT_TXN)
        self.db = db.DB(self.env)

    def tearDown(self) :
        self.db.close()
        del self.db
        self.env.close()
        del self.env
        test_support.rmtree(self.homeDir)

    def test_flags(self) :
        self.db.set_flags(db.DB_CHKSUM)
        self.assertEqual(db.DB_CHKSUM, self.db.get_flags())
        self.db.set_flags(db.DB_TXN_NOT_DURABLE)
        self.assertEqual(db.DB_TXN_NOT_DURABLE | db.DB_CHKSUM,
                self.db.get_flags())

    def test_get_transactional(self) :
        self.assertFalse(self.db.get_transactional())
        # DB_AUTO_COMMIT = Implicit transaction
        self.db.open("XXX", dbtype=db.DB_HASH,
                flags = db.DB_CREATE | db.DB_AUTO_COMMIT)
        self.assertTrue(self.db.get_transactional())

class DB_recno(DB) :
    def test_re_pad(self) :
        for i in [' ', '*'] :  # Check chars
            self.db.set_re_pad(i)
            self.assertEqual(ord(i), self.db.get_re_pad())
        for i in [97, 65] :  # Check integers
            self.db.set_re_pad(i)
            self.assertEqual(i, self.db.get_re_pad())

    def test_re_delim(self) :
        for i in [' ', '*'] :  # Check chars
            self.db.set_re_delim(i)
            self.assertEqual(ord(i), self.db.get_re_delim())
        for i in [97, 65] :  # Check integers
            self.db.set_re_delim(i)
            self.assertEqual(i, self.db.get_re_delim())

    def test_re_source(self) :
        for i in ["test", "test2", "test3"] :
            self.db.set_re_source(i)
            self.assertEqual(i, self.db.get_re_source())

class DB_queue(DB) :
    def test_re_len(self) :
        for i in [33, 65, 300, 2000] :
            self.db.set_re_len(i)
            self.assertEqual(i, self.db.get_re_len())

    def test_q_extentsize(self) :
        for i in [1, 60, 100] :
            self.db.set_q_extentsize(i)
            self.assertEqual(i, self.db.get_q_extentsize())

def test_suite():
    suite = unittest.TestSuite()

    suite.addTest(unittest.makeSuite(DB_general))
    suite.addTest(unittest.makeSuite(DB_txn))
    suite.addTest(unittest.makeSuite(DB_hash))
    suite.addTest(unittest.makeSuite(DB_recno))
    suite.addTest(unittest.makeSuite(DB_queue))

    return suite

if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
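Editor's note: test_pagesize probes the rule that Berkeley DB page sizes must be powers of two between 512 and 65536 bytes. A pure-Python statement of that constraint, matching the boundary cases the test exercises:

# Page-size constraint sketch, no bsddb required.
def valid_pagesize(n):
    # power of two, within [512, 65536]
    return 512 <= n <= 65536 and (n & (n - 1)) == 0

assert all(valid_pagesize(1 << i) for i in range(9, 17))
assert not valid_pagesize(1 << 17)   # 131072: too large, rejected above
assert not valid_pagesize(1 << 8)    # 256: too small, rejected above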
@@ -1,525 +0,0 @@
import unittest
import os, glob

from test_all import db, test_support, get_new_environment_path, \
        get_new_database_path

#----------------------------------------------------------------------

class DBEnv(unittest.TestCase):
    def setUp(self):
        self.homeDir = get_new_environment_path()
        self.env = db.DBEnv()

    def tearDown(self):
        self.env.close()
        del self.env
        test_support.rmtree(self.homeDir)

class DBEnv_general(DBEnv) :
    def test_get_open_flags(self) :
        flags = db.DB_CREATE | db.DB_INIT_MPOOL
        self.env.open(self.homeDir, flags)
        self.assertEqual(flags, self.env.get_open_flags())

    def test_get_open_flags2(self) :
        flags = db.DB_CREATE | db.DB_INIT_MPOOL | \
                db.DB_INIT_LOCK | db.DB_THREAD
        self.env.open(self.homeDir, flags)
        self.assertEqual(flags, self.env.get_open_flags())

    if db.version() >= (4, 7) :
        def test_lk_partitions(self) :
            for i in [10, 20, 40] :
                self.env.set_lk_partitions(i)
                self.assertEqual(i, self.env.get_lk_partitions())

        def test_getset_intermediate_dir_mode(self) :
            self.assertEqual(None, self.env.get_intermediate_dir_mode())
            for mode in ["rwx------", "rw-rw-rw-", "rw-r--r--"] :
                self.env.set_intermediate_dir_mode(mode)
                self.assertEqual(mode, self.env.get_intermediate_dir_mode())
            self.assertRaises(db.DBInvalidArgError,
                    self.env.set_intermediate_dir_mode, "abcde")

    if db.version() >= (4, 6) :
        def test_thread(self) :
            for i in [16, 100, 1000] :
                self.env.set_thread_count(i)
                self.assertEqual(i, self.env.get_thread_count())

        def test_cache_max(self) :
            for size in [64, 128] :
                size = size*1024*1024  # Megabytes
                self.env.set_cache_max(0, size)
                size2 = self.env.get_cache_max()
                self.assertEqual(0, size2[0])
                self.assertTrue(size <= size2[1])
                self.assertTrue(2*size > size2[1])

    if db.version() >= (4, 4) :
        def test_mutex_stat(self) :
            self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL |
                    db.DB_INIT_LOCK)
            stat = self.env.mutex_stat()
            self.assertTrue("mutex_inuse_max" in stat)

    def test_lg_filemode(self) :
        for i in [0600, 0660, 0666] :
            self.env.set_lg_filemode(i)
            self.assertEqual(i, self.env.get_lg_filemode())

    def test_mp_max_openfd(self) :
        for i in [17, 31, 42] :
            self.env.set_mp_max_openfd(i)
            self.assertEqual(i, self.env.get_mp_max_openfd())

    def test_mp_max_write(self) :
        for i in [100, 200, 300] :
            for j in [1, 2, 3] :
                j *= 1000000
                self.env.set_mp_max_write(i, j)
                v = self.env.get_mp_max_write()
                self.assertEqual((i, j), v)

    def test_invalid_txn(self) :
        # This environment doesn't support transactions
        self.assertRaises(db.DBInvalidArgError, self.env.txn_begin)

    def test_mp_mmapsize(self) :
        for i in [16, 32, 64] :
            i *= 1024*1024
            self.env.set_mp_mmapsize(i)
            self.assertEqual(i, self.env.get_mp_mmapsize())

    def test_tmp_dir(self) :
        for i in ["a", "bb", "ccc"] :
            self.env.set_tmp_dir(i)
            self.assertEqual(i, self.env.get_tmp_dir())

    def test_flags(self) :
        self.env.set_flags(db.DB_AUTO_COMMIT, 1)
        self.assertEqual(db.DB_AUTO_COMMIT, self.env.get_flags())
        self.env.set_flags(db.DB_TXN_NOSYNC, 1)
        self.assertEqual(db.DB_AUTO_COMMIT | db.DB_TXN_NOSYNC,
                self.env.get_flags())
        self.env.set_flags(db.DB_AUTO_COMMIT, 0)
        self.assertEqual(db.DB_TXN_NOSYNC, self.env.get_flags())
        self.env.set_flags(db.DB_TXN_NOSYNC, 0)
        self.assertEqual(0, self.env.get_flags())

    def test_lk_max_objects(self) :
        for i in [1000, 2000, 3000] :
            self.env.set_lk_max_objects(i)
            self.assertEqual(i, self.env.get_lk_max_objects())

    def test_lk_max_locks(self) :
        for i in [1000, 2000, 3000] :
            self.env.set_lk_max_locks(i)
            self.assertEqual(i, self.env.get_lk_max_locks())

    def test_lk_max_lockers(self) :
        for i in [1000, 2000, 3000] :
            self.env.set_lk_max_lockers(i)
            self.assertEqual(i, self.env.get_lk_max_lockers())

    def test_lg_regionmax(self) :
        for i in [128, 256, 1000] :
            i = i*1024*1024
            self.env.set_lg_regionmax(i)
            j = self.env.get_lg_regionmax()
            self.assertTrue(i <= j)
            self.assertTrue(2*i > j)

    def test_lk_detect(self) :
        flags = [db.DB_LOCK_DEFAULT, db.DB_LOCK_EXPIRE, db.DB_LOCK_MAXLOCKS,
                db.DB_LOCK_MINLOCKS, db.DB_LOCK_MINWRITE,
                db.DB_LOCK_OLDEST, db.DB_LOCK_RANDOM, db.DB_LOCK_YOUNGEST]

        flags.append(db.DB_LOCK_MAXWRITE)

        for i in flags :
            self.env.set_lk_detect(i)
            self.assertEqual(i, self.env.get_lk_detect())

    def test_lg_dir(self) :
        for i in ["a", "bb", "ccc", "dddd"] :
            self.env.set_lg_dir(i)
            self.assertEqual(i, self.env.get_lg_dir())

    def test_lg_bsize(self) :
        log_size = 70*1024
        self.env.set_lg_bsize(log_size)
        self.assertTrue(self.env.get_lg_bsize() >= log_size)
        self.assertTrue(self.env.get_lg_bsize() < 4*log_size)
        self.env.set_lg_bsize(4*log_size)
        self.assertTrue(self.env.get_lg_bsize() >= 4*log_size)

    def test_setget_data_dirs(self) :
        dirs = ("a", "b", "c", "d")
        for i in dirs :
            self.env.set_data_dir(i)
        self.assertEqual(dirs, self.env.get_data_dirs())

    def test_setget_cachesize(self) :
        cachesize = (0, 512*1024*1024, 3)
        self.env.set_cachesize(*cachesize)
        self.assertEqual(cachesize, self.env.get_cachesize())

        cachesize = (0, 1*1024*1024, 5)
        self.env.set_cachesize(*cachesize)
        cachesize2 = self.env.get_cachesize()
        self.assertEqual(cachesize[0], cachesize2[0])
        self.assertEqual(cachesize[2], cachesize2[2])
        # Berkeley DB expands the cache by 25% to account for overhead
        # if the cache is small.
        self.assertEqual(125, int(100.0*cachesize2[1]/cachesize[1]))

        # You cannot change the configuration after opening
        # the environment.
        self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL)
        cachesize = (0, 2*1024*1024, 1)
        self.assertRaises(db.DBInvalidArgError,
                self.env.set_cachesize, *cachesize)
        cachesize3 = self.env.get_cachesize()
        self.assertEqual(cachesize2[0], cachesize3[0])
        self.assertEqual(cachesize2[2], cachesize3[2])
        # In Berkeley DB 5.1, the cachesize can change when opening the Env
        self.assertTrue(cachesize2[1] <= cachesize3[1])

    def test_set_cachesize_dbenv_db(self) :
        # You can not configure the cachesize using
        # the database handle, if you are using an environment.
        d = db.DB(self.env)
        self.assertRaises(db.DBInvalidArgError,
                d.set_cachesize, 0, 1024*1024, 1)

    def test_setget_shm_key(self) :
        shm_key = 137
        self.env.set_shm_key(shm_key)
        self.assertEqual(shm_key, self.env.get_shm_key())
        self.env.set_shm_key(shm_key+1)
        self.assertEqual(shm_key+1, self.env.get_shm_key())

        # You cannot change the configuration after opening
        # the environment.
        self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL)
        # Reconfiguring the shared memory key after opening the
        # environment must raise instead of core dumping.
        self.assertRaises(db.DBInvalidArgError,
                self.env.set_shm_key, shm_key)
        self.assertEqual(shm_key+1, self.env.get_shm_key())

    if db.version() >= (4, 4) :
        def test_mutex_setget_max(self) :
            v = self.env.mutex_get_max()
            v2 = v*2+1

            self.env.mutex_set_max(v2)
            self.assertEqual(v2, self.env.mutex_get_max())

            self.env.mutex_set_max(v)
            self.assertEqual(v, self.env.mutex_get_max())

            # You cannot change the configuration after opening
            # the environment.
            self.env.open(self.homeDir, db.DB_CREATE)
            self.assertRaises(db.DBInvalidArgError,
                    self.env.mutex_set_max, v2)

        def test_mutex_setget_increment(self) :
            v = self.env.mutex_get_increment()
            v2 = 127

            self.env.mutex_set_increment(v2)
            self.assertEqual(v2, self.env.mutex_get_increment())

            self.env.mutex_set_increment(v)
            self.assertEqual(v, self.env.mutex_get_increment())

            # You cannot change the configuration after opening
            # the environment.
            self.env.open(self.homeDir, db.DB_CREATE)
            self.assertRaises(db.DBInvalidArgError,
                    self.env.mutex_set_increment, v2)

        def test_mutex_setget_tas_spins(self) :
            self.env.mutex_set_tas_spins(0)  # Default = BDB decides
            v = self.env.mutex_get_tas_spins()
            v2 = v*2+1

            self.env.mutex_set_tas_spins(v2)
            self.assertEqual(v2, self.env.mutex_get_tas_spins())

            self.env.mutex_set_tas_spins(v)
            self.assertEqual(v, self.env.mutex_get_tas_spins())

            # In this case, you can change the configuration
            # after opening the environment.
            self.env.open(self.homeDir, db.DB_CREATE)
            self.env.mutex_set_tas_spins(v2)

        def test_mutex_setget_align(self) :
            v = self.env.mutex_get_align()
            v2 = 64
            if v == 64 :
                v2 = 128

            self.env.mutex_set_align(v2)
            self.assertEqual(v2, self.env.mutex_get_align())

            # Requires a nonzero power of two
            self.assertRaises(db.DBInvalidArgError,
                    self.env.mutex_set_align, 0)
            self.assertRaises(db.DBInvalidArgError,
                    self.env.mutex_set_align, 17)

            self.env.mutex_set_align(2*v2)
            self.assertEqual(2*v2, self.env.mutex_get_align())

            # You cannot change the configuration after opening
            # the environment.
            self.env.open(self.homeDir, db.DB_CREATE)
            self.assertRaises(db.DBInvalidArgError,
                    self.env.mutex_set_align, v2)
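Editor's note: the recurring pattern in the tests above is that environment tunables accept changes only before DBEnv.open(). The stand-in class below models that rule; it is hypothetical, not bsddb API, and raises ValueError where the real binding raises DBInvalidArgError.

# Frozen-after-open configuration sketch, hypothetical names.
class Env:
    def __init__(self):
        self.opened = False
        self.align = 64
    def open(self):
        self.opened = True
    def set_align(self, n):
        if self.opened:
            raise ValueError("configuration is frozen after open()")
        self.align = n

e = Env()
e.set_align(128)   # allowed: environment not yet opened
e.open()
try:
    e.set_align(64)
except ValueError:
    pass           # expected: same behavior the tests assert above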
class DBEnv_log(DBEnv) :
    def setUp(self):
        DBEnv.setUp(self)
        self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL | db.DB_INIT_LOG)

    def test_log_file(self) :
        log_file = self.env.log_file((1, 1))
        self.assertEqual("log.0000000001", log_file[-14:])

    if db.version() >= (4, 4) :
        # The version with transactions is checked in other test object
        def test_log_printf(self) :
            msg = "This is a test..."
            self.env.log_printf(msg)
            logc = self.env.log_cursor()
            self.assertTrue(msg in (logc.last()[1]))

    if db.version() >= (4, 7) :
        def test_log_config(self) :
            self.env.log_set_config(db.DB_LOG_DSYNC | db.DB_LOG_ZERO, 1)
            self.assertTrue(self.env.log_get_config(db.DB_LOG_DSYNC))
            self.assertTrue(self.env.log_get_config(db.DB_LOG_ZERO))
            self.env.log_set_config(db.DB_LOG_ZERO, 0)
            self.assertTrue(self.env.log_get_config(db.DB_LOG_DSYNC))
            self.assertFalse(self.env.log_get_config(db.DB_LOG_ZERO))


class DBEnv_log_txn(DBEnv) :
    def setUp(self):
        DBEnv.setUp(self)
        self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL |
                db.DB_INIT_LOG | db.DB_INIT_TXN)

    if (db.version() >= (4, 5)) and (db.version() < (5, 2)) :
        def test_tx_max(self) :
            txns = []
            def tx() :
                for i in xrange(self.env.get_tx_max()) :
                    txns.append(self.env.txn_begin())

            tx()
            self.assertRaises(MemoryError, tx)

            # Abort the transactions before garbage collection,
            # to avoid "warnings".
            for i in txns :
                i.abort()

    if db.version() >= (4, 4) :
        # The version without transactions is checked in other test object
        def test_log_printf(self) :
            msg = "This is a test..."
            txn = self.env.txn_begin()
            self.env.log_printf(msg, txn=txn)
            txn.commit()
            logc = self.env.log_cursor()
            logc.last()  # Skip the commit
            self.assertTrue(msg in (logc.prev()[1]))

            msg = "This is another test..."
            txn = self.env.txn_begin()
            self.env.log_printf(msg, txn=txn)
            txn.abort()  # Do not store the new message
            logc.last()  # Skip the abort
            self.assertTrue(msg not in (logc.prev()[1]))

            msg = "This is a third test..."
            txn = self.env.txn_begin()
            self.env.log_printf(msg, txn=txn)
            txn.commit()  # Store the new message
            logc.last()  # Skip the commit
            self.assertTrue(msg in (logc.prev()[1]))


class DBEnv_memp(DBEnv):
    def setUp(self):
        DBEnv.setUp(self)
        self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL | db.DB_INIT_LOG)
        self.db = db.DB(self.env)
        self.db.open("test", db.DB_HASH, db.DB_CREATE, 0660)

    def tearDown(self):
        self.db.close()
        del self.db
        DBEnv.tearDown(self)

    def test_memp_1_trickle(self) :
        self.db.put("hi", "bye")
        self.assertTrue(self.env.memp_trickle(100) > 0)

    # Preserve the order, do "memp_trickle" test first
    def test_memp_2_sync(self) :
        self.db.put("hi", "bye")
        self.env.memp_sync()  # Full flush
        # Nothing to do...
        self.assertTrue(self.env.memp_trickle(100) == 0)

        self.db.put("hi", "bye2")
        self.env.memp_sync((1, 0))  # NOP, probably
        # Something to do... or not
        self.assertTrue(self.env.memp_trickle(100) >= 0)

        self.db.put("hi", "bye3")
        self.env.memp_sync((123, 99))  # Full flush
        # Nothing to do...
        self.assertTrue(self.env.memp_trickle(100) == 0)

    def test_memp_stat_1(self) :
        stats = self.env.memp_stat()  # No param
        self.assertTrue(len(stats) == 2)
        self.assertTrue("cache_miss" in stats[0])
        stats = self.env.memp_stat(db.DB_STAT_CLEAR)  # Positional param
        self.assertTrue("cache_miss" in stats[0])
        stats = self.env.memp_stat(flags=0)  # Keyword param
        self.assertTrue("cache_miss" in stats[0])

    def test_memp_stat_2(self) :
        stats = self.env.memp_stat()[1]
        self.assertEqual(len(stats), 1)
        self.assertTrue("test" in stats)
        self.assertTrue("page_in" in stats["test"])


class DBEnv_logcursor(DBEnv):
    def setUp(self):
        DBEnv.setUp(self)
        self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL |
                db.DB_INIT_LOG | db.DB_INIT_TXN)
        txn = self.env.txn_begin()
        self.db = db.DB(self.env)
        self.db.open("test", db.DB_HASH, db.DB_CREATE, 0660, txn=txn)
        txn.commit()
        for i in ["2", "8", "20"] :
            txn = self.env.txn_begin()
            self.db.put(key = i, data = i*int(i), txn=txn)
            txn.commit()

    def tearDown(self):
        self.db.close()
        del self.db
        DBEnv.tearDown(self)

    def _check_return(self, value) :
        self.assertTrue(isinstance(value, tuple))
        self.assertEqual(len(value), 2)
        self.assertTrue(isinstance(value[0], tuple))
        self.assertEqual(len(value[0]), 2)
        self.assertTrue(isinstance(value[0][0], int))
        self.assertTrue(isinstance(value[0][1], int))
        self.assertTrue(isinstance(value[1], str))

    # Preserve test order
    def test_1_first(self) :
        logc = self.env.log_cursor()
        v = logc.first()
        self._check_return(v)
        self.assertTrue((1, 1) < v[0])
        self.assertTrue(len(v[1]) > 0)

    def test_2_last(self) :
        logc = self.env.log_cursor()
        lsn_first = logc.first()[0]
        v = logc.last()
        self._check_return(v)
        self.assertTrue(lsn_first < v[0])

    def test_3_next(self) :
        logc = self.env.log_cursor()
        lsn_last = logc.last()[0]
        self.assertEqual(logc.next(), None)
        lsn_first = logc.first()[0]
        v = logc.next()
        self._check_return(v)
        self.assertTrue(lsn_first < v[0])
        self.assertTrue(lsn_last > v[0])

        v2 = logc.next()
        self.assertTrue(v2[0] > v[0])
        self.assertTrue(lsn_last > v2[0])

        v3 = logc.next()
        self.assertTrue(v3[0] > v2[0])
        self.assertTrue(lsn_last > v3[0])

    def test_4_prev(self) :
        logc = self.env.log_cursor()
        lsn_first = logc.first()[0]
        self.assertEqual(logc.prev(), None)
        lsn_last = logc.last()[0]
        v = logc.prev()
        self._check_return(v)
        self.assertTrue(lsn_first < v[0])
        self.assertTrue(lsn_last > v[0])

        v2 = logc.prev()
        self.assertTrue(v2[0] < v[0])
        self.assertTrue(lsn_first < v2[0])

        v3 = logc.prev()
        self.assertTrue(v3[0] < v2[0])
        self.assertTrue(lsn_first < v3[0])

    def test_5_current(self) :
        logc = self.env.log_cursor()
        logc.first()
        v = logc.next()
        self.assertEqual(v, logc.current())

    def test_6_set(self) :
        logc = self.env.log_cursor()
        logc.first()
        v = logc.next()
        self.assertNotEqual(v, logc.next())
        self.assertNotEqual(v, logc.next())
        self.assertEqual(v, logc.set(v[0]))

    def test_explicit_close(self) :
        logc = self.env.log_cursor()
        logc.close()
        self.assertRaises(db.DBCursorClosedError, logc.next)

    def test_implicit_close(self) :
        logc = [self.env.log_cursor() for i in xrange(10)]
        self.env.close()  # Closing the environment should close every cursor it owns
        for i in logc :
            self.assertRaises(db.DBCursorClosedError, i.next)

def test_suite():
    suite = unittest.TestSuite()

    suite.addTest(unittest.makeSuite(DBEnv_general))
    suite.addTest(unittest.makeSuite(DBEnv_memp))
    suite.addTest(unittest.makeSuite(DBEnv_logcursor))
    suite.addTest(unittest.makeSuite(DBEnv_log))
    suite.addTest(unittest.makeSuite(DBEnv_log_txn))

    return suite

if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
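Editor's note: the arithmetic behind the 25% assertion in test_setget_cachesize above: for small caches, Berkeley DB adds roughly a quarter of the requested size for bookkeeping overhead, so asking for 1 MiB reports back about 1.25 MiB. Illustration only.

# Cache-overhead arithmetic sketch.
requested = 1 * 1024 * 1024
granted = int(requested * 1.25)          # what get_cachesize() reports back
assert int(100.0 * granted / requested) == 125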
@@ -1,70 +0,0 @@

import os, string
import unittest

from test_all import db, dbobj, test_support, get_new_environment_path, \
        get_new_database_path

#----------------------------------------------------------------------

class dbobjTestCase(unittest.TestCase):
    """Verify that dbobj.DB and dbobj.DBEnv work properly"""
    db_name = 'test-dbobj.db'

    def setUp(self):
        self.homeDir = get_new_environment_path()

    def tearDown(self):
        if hasattr(self, 'db'):
            del self.db
        if hasattr(self, 'env'):
            del self.env
        test_support.rmtree(self.homeDir)

    def test01_both(self):
        class TestDBEnv(dbobj.DBEnv): pass
        class TestDB(dbobj.DB):
            def put(self, key, *args, **kwargs):
                key = key.upper()
                # call our parent class's put method with an upper-case key
                return dbobj.DB.put(self, key, *args, **kwargs)
        self.env = TestDBEnv()
        self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL)
        self.db = TestDB(self.env)
        self.db.open(self.db_name, db.DB_HASH, db.DB_CREATE)
        self.db.put('spam', 'eggs')
        self.assertEqual(self.db.get('spam'), None,
                "overridden dbobj.DB.put() method failed [1]")
        self.assertEqual(self.db.get('SPAM'), 'eggs',
                "overridden dbobj.DB.put() method failed [2]")
        self.db.close()
        self.env.close()

    def test02_dbobj_dict_interface(self):
        self.env = dbobj.DBEnv()
        self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL)
        self.db = dbobj.DB(self.env)
        self.db.open(self.db_name+'02', db.DB_HASH, db.DB_CREATE)
        # __setitem__
        self.db['spam'] = 'eggs'
        # __len__
        self.assertEqual(len(self.db), 1)
        # __getitem__
        self.assertEqual(self.db['spam'], 'eggs')
        # __delitem__
        del self.db['spam']
        self.assertEqual(self.db.get('spam'), None, "dbobj __delitem__ failed")
        self.db.close()
        self.env.close()

    def test03_dbobj_type_before_open(self):
        # Ensure this doesn't cause a segfault.
        self.assertRaises(db.DBInvalidArgError, db.DB().type)

#----------------------------------------------------------------------

def test_suite():
    return unittest.makeSuite(dbobjTestCase)

if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
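Editor's note: the delegation pattern test01_both exercises, shown with a plain dict subclass instead of dbobj.DB (illustration only): the override rewrites the key, then hands off to the parent implementation.

# Override-then-delegate sketch on dict; UpperDict is a hypothetical name.
class UpperDict(dict):
    def __setitem__(self, key, value):
        dict.__setitem__(self, key.upper(), value)  # the parent does the storing

d = UpperDict()
d['spam'] = 'eggs'
assert d.get('spam') is None    # stored under the rewritten key only
assert d['SPAM'] == 'eggs'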
@@ -1,398 +0,0 @@
"""
|
||||
TestCases for checking dbShelve objects.
|
||||
"""
|
||||
|
||||
import os, string, sys
|
||||
import random
|
||||
import unittest
|
||||
|
||||
|
||||
from test_all import db, dbshelve, test_support, verbose, \
|
||||
get_new_environment_path, get_new_database_path
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
#----------------------------------------------------------------------
|
||||
|
||||
# We want the objects to be comparable so we can test dbshelve.values
|
||||
# later on.
|
||||
class DataClass:
|
||||
def __init__(self):
|
||||
self.value = random.random()
|
||||
|
||||
def __repr__(self) : # For Python 3.0 comparison
|
||||
return "DataClass %f" %self.value
|
||||
|
||||
def __cmp__(self, other): # For Python 2.x comparison
|
||||
return cmp(self.value, other)
|
||||
|
||||
|
||||
class DBShelveTestCase(unittest.TestCase):
|
||||
if (sys.version_info < (2, 7)) or ((sys.version_info >= (3, 0)) and
|
||||
(sys.version_info < (3, 2))) :
|
||||
def assertIn(self, a, b, msg=None) :
|
||||
return self.assertTrue(a in b, msg=msg)
|
||||
|
||||
|
||||
def setUp(self):
|
||||
if sys.version_info[0] >= 3 :
|
||||
from test_all import do_proxy_db_py3k
|
||||
self._flag_proxy_db_py3k = do_proxy_db_py3k(False)
|
||||
self.filename = get_new_database_path()
|
||||
self.do_open()
|
||||
|
||||
def tearDown(self):
|
||||
if sys.version_info[0] >= 3 :
|
||||
from test_all import do_proxy_db_py3k
|
||||
do_proxy_db_py3k(self._flag_proxy_db_py3k)
|
||||
self.do_close()
|
||||
test_support.unlink(self.filename)
|
||||
|
||||
def mk(self, key):
|
||||
"""Turn key into an appropriate key type for this db"""
|
||||
# override in child class for RECNO
|
||||
if sys.version_info[0] < 3 :
|
||||
return key
|
||||
else :
|
||||
return bytes(key, "iso8859-1") # 8 bits
|
||||
|
||||
def populateDB(self, d):
|
||||
for x in string.letters:
|
||||
d[self.mk('S' + x)] = 10 * x # add a string
|
||||
d[self.mk('I' + x)] = ord(x) # add an integer
|
||||
d[self.mk('L' + x)] = [x] * 10 # add a list
|
||||
|
||||
inst = DataClass() # add an instance
|
||||
inst.S = 10 * x
|
||||
inst.I = ord(x)
|
||||
inst.L = [x] * 10
|
||||
d[self.mk('O' + x)] = inst
|
||||
|
||||
|
||||
# overridable in derived classes to affect how the shelf is created/opened
|
||||
def do_open(self):
|
||||
self.d = dbshelve.open(self.filename)
|
||||
|
||||
# and closed...
|
||||
def do_close(self):
|
||||
self.d.close()
|
||||
|
||||
|
||||
|
||||
def test01_basics(self):
|
||||
if verbose:
|
||||
print '\n', '-=' * 30
|
||||
print "Running %s.test01_basics..." % self.__class__.__name__
|
||||
|
||||
self.populateDB(self.d)
|
||||
self.d.sync()
|
||||
self.do_close()
|
||||
self.do_open()
|
||||
d = self.d
|
||||
|
||||
l = len(d)
|
||||
k = d.keys()
|
||||
s = d.stat()
|
||||
f = d.fd()
|
||||
|
||||
if verbose:
|
||||
print "length:", l
|
||||
print "keys:", k
|
||||
print "stats:", s
|
||||
|
||||
self.assertEqual(0, d.has_key(self.mk('bad key')))
|
||||
self.assertEqual(1, d.has_key(self.mk('IA')))
|
||||
self.assertEqual(1, d.has_key(self.mk('OA')))
|
||||
|
||||
d.delete(self.mk('IA'))
|
||||
del d[self.mk('OA')]
|
||||
self.assertEqual(0, d.has_key(self.mk('IA')))
|
||||
self.assertEqual(0, d.has_key(self.mk('OA')))
|
||||
self.assertEqual(len(d), l-2)
|
||||
|
||||
values = []
|
||||
for key in d.keys():
|
||||
value = d[key]
|
||||
values.append(value)
|
||||
if verbose:
|
||||
print "%s: %s" % (key, value)
|
||||
self.checkrec(key, value)
|
||||
|
||||
dbvalues = d.values()
|
||||
self.assertEqual(len(dbvalues), len(d.keys()))
|
||||
if sys.version_info < (2, 6) :
|
||||
values.sort()
|
||||
dbvalues.sort()
|
||||
self.assertEqual(values, dbvalues)
|
||||
else : # XXX: Convert all to strings. Please, improve
|
||||
values.sort(key=lambda x : str(x))
|
||||
dbvalues.sort(key=lambda x : str(x))
|
||||
self.assertEqual(repr(values), repr(dbvalues))
|
||||
|
||||
items = d.items()
|
||||
self.assertEqual(len(items), len(values))
|
||||
|
||||
for key, value in items:
|
||||
self.checkrec(key, value)
|
||||
|
||||
self.assertEqual(d.get(self.mk('bad key')), None)
|
||||
self.assertEqual(d.get(self.mk('bad key'), None), None)
|
||||
self.assertEqual(d.get(self.mk('bad key'), 'a string'), 'a string')
|
||||
self.assertEqual(d.get(self.mk('bad key'), [1, 2, 3]), [1, 2, 3])
|
||||
|
||||
d.set_get_returns_none(0)
|
||||
self.assertRaises(db.DBNotFoundError, d.get, self.mk('bad key'))
|
||||
d.set_get_returns_none(1)
|
||||
|
||||
d.put(self.mk('new key'), 'new data')
|
||||
self.assertEqual(d.get(self.mk('new key')), 'new data')
|
||||
self.assertEqual(d[self.mk('new key')], 'new data')
|
||||
|
||||
|
||||
|
||||
def test02_cursors(self):
|
||||
if verbose:
|
||||
print '\n', '-=' * 30
|
||||
print "Running %s.test02_cursors..." % self.__class__.__name__
|
||||
|
||||
self.populateDB(self.d)
|
||||
d = self.d
|
||||
|
||||
count = 0
|
||||
c = d.cursor()
|
||||
rec = c.first()
|
||||
while rec is not None:
|
||||
count = count + 1
|
||||
if verbose:
|
||||
print rec
|
||||
key, value = rec
|
||||
self.checkrec(key, value)
|
||||
# Hack to avoid conversion by 2to3 tool
|
||||
rec = getattr(c, "next")()
|
||||
del c
|
||||
|
||||
self.assertEqual(count, len(d))
|
||||
|
||||
count = 0
|
||||
c = d.cursor()
|
||||
rec = c.last()
|
||||
while rec is not None:
|
||||
count = count + 1
|
||||
if verbose:
|
||||
print rec
|
||||
key, value = rec
|
||||
self.checkrec(key, value)
|
||||
rec = c.prev()
|
||||
|
||||
self.assertEqual(count, len(d))
|
||||
|
||||
c.set(self.mk('SS'))
|
||||
key, value = c.current()
|
||||
self.checkrec(key, value)
|
||||
del c
|
||||
|
||||
|
||||
def test03_append(self):
|
||||
# NOTE: this is overridden in RECNO subclass, don't change its name.
|
||||
if verbose:
|
||||
print '\n', '-=' * 30
|
||||
print "Running %s.test03_append..." % self.__class__.__name__
|
||||
|
||||
self.assertRaises(dbshelve.DBShelveError,
|
||||
self.d.append, 'unit test was here')
|
||||
|
||||
|
||||
def test04_iterable(self) :
|
||||
self.populateDB(self.d)
|
||||
d = self.d
|
||||
keys = d.keys()
|
||||
keyset = set(keys)
|
||||
self.assertEqual(len(keyset), len(keys))
|
||||
|
||||
for key in d :
|
||||
self.assertIn(key, keyset)
|
||||
keyset.remove(key)
|
||||
self.assertEqual(len(keyset), 0)
|
||||
|
||||
def checkrec(self, key, value):
|
||||
# override this in a subclass if the key type is different
|
||||
|
||||
if sys.version_info[0] >= 3 :
|
||||
if isinstance(key, bytes) :
|
||||
key = key.decode("iso8859-1") # 8 bits
|
||||
|
||||
x = key[1]
|
||||
if key[0] == 'S':
|
||||
self.assertEqual(type(value), str)
|
||||
self.assertEqual(value, 10 * x)
|
||||
|
||||
elif key[0] == 'I':
|
||||
self.assertEqual(type(value), int)
|
||||
self.assertEqual(value, ord(x))
|
||||
|
||||
elif key[0] == 'L':
|
||||
self.assertEqual(type(value), list)
|
||||
self.assertEqual(value, [x] * 10)
|
||||
|
||||
elif key[0] == 'O':
|
||||
if sys.version_info[0] < 3 :
|
||||
from types import InstanceType
|
||||
self.assertEqual(type(value), InstanceType)
|
||||
else :
|
||||
self.assertEqual(type(value), DataClass)
|
||||
|
||||
self.assertEqual(value.S, 10 * x)
|
||||
self.assertEqual(value.I, ord(x))
|
||||
self.assertEqual(value.L, [x] * 10)
|
||||
|
||||
else:
|
||||
self.assertTrue(0, 'Unknown key type, fix the test')
|
||||
|
||||
#----------------------------------------------------------------------

class BasicShelveTestCase(DBShelveTestCase):
    def do_open(self):
        self.d = dbshelve.DBShelf()
        self.d.open(self.filename, self.dbtype, self.dbflags)

    def do_close(self):
        self.d.close()


class BTreeShelveTestCase(BasicShelveTestCase):
    dbtype = db.DB_BTREE
    dbflags = db.DB_CREATE


class HashShelveTestCase(BasicShelveTestCase):
    dbtype = db.DB_HASH
    dbflags = db.DB_CREATE


class ThreadBTreeShelveTestCase(BasicShelveTestCase):
    dbtype = db.DB_BTREE
    dbflags = db.DB_CREATE | db.DB_THREAD


class ThreadHashShelveTestCase(BasicShelveTestCase):
    dbtype = db.DB_HASH
    dbflags = db.DB_CREATE | db.DB_THREAD


#----------------------------------------------------------------------

class BasicEnvShelveTestCase(DBShelveTestCase):
    def do_open(self):
        self.env = db.DBEnv()
        self.env.open(self.homeDir,
                self.envflags | db.DB_INIT_MPOOL | db.DB_CREATE)

        self.filename = os.path.split(self.filename)[1]
        self.d = dbshelve.DBShelf(self.env)
        self.d.open(self.filename, self.dbtype, self.dbflags)


    def do_close(self):
        self.d.close()
        self.env.close()


    def setUp(self) :
        self.homeDir = get_new_environment_path()
        DBShelveTestCase.setUp(self)

    def tearDown(self):
        if sys.version_info[0] >= 3 :
            from test_all import do_proxy_db_py3k
            do_proxy_db_py3k(self._flag_proxy_db_py3k)
        self.do_close()
        test_support.rmtree(self.homeDir)


class EnvBTreeShelveTestCase(BasicEnvShelveTestCase):
    envflags = 0
    dbtype = db.DB_BTREE
    dbflags = db.DB_CREATE


class EnvHashShelveTestCase(BasicEnvShelveTestCase):
    envflags = 0
    dbtype = db.DB_HASH
    dbflags = db.DB_CREATE


class EnvThreadBTreeShelveTestCase(BasicEnvShelveTestCase):
    envflags = db.DB_THREAD
    dbtype = db.DB_BTREE
    dbflags = db.DB_CREATE | db.DB_THREAD


class EnvThreadHashShelveTestCase(BasicEnvShelveTestCase):
    envflags = db.DB_THREAD
    dbtype = db.DB_HASH
    dbflags = db.DB_CREATE | db.DB_THREAD


#----------------------------------------------------------------------
# test cases for a DBShelf in a RECNO DB.

class RecNoShelveTestCase(BasicShelveTestCase):
    dbtype = db.DB_RECNO
    dbflags = db.DB_CREATE

    def setUp(self):
        BasicShelveTestCase.setUp(self)

        # pool to assign integer key values out of
        self.key_pool = list(range(1, 5000))
        self.key_map = {}     # map string keys to the number we gave them
        self.intkey_map = {}  # reverse map of above

    def mk(self, key):
        if key not in self.key_map:
            self.key_map[key] = self.key_pool.pop(0)
            self.intkey_map[self.key_map[key]] = key
        return self.key_map[key]

    def checkrec(self, intkey, value):
        key = self.intkey_map[intkey]
        BasicShelveTestCase.checkrec(self, key, value)

    def test03_append(self):
        if verbose:
            print '\n', '-=' * 30
            print "Running %s.test03_append..." % self.__class__.__name__

        self.d[1] = 'spam'
        self.d[5] = 'eggs'
        self.assertEqual(6, self.d.append('spam'))
        self.assertEqual(7, self.d.append('baked beans'))
        self.assertEqual('spam', self.d.get(6))
        self.assertEqual('spam', self.d.get(1))
        self.assertEqual('baked beans', self.d.get(7))
        self.assertEqual('eggs', self.d.get(5))


#----------------------------------------------------------------------

def test_suite():
    suite = unittest.TestSuite()

    suite.addTest(unittest.makeSuite(DBShelveTestCase))
    suite.addTest(unittest.makeSuite(BTreeShelveTestCase))
    suite.addTest(unittest.makeSuite(HashShelveTestCase))
    suite.addTest(unittest.makeSuite(ThreadBTreeShelveTestCase))
    suite.addTest(unittest.makeSuite(ThreadHashShelveTestCase))
    suite.addTest(unittest.makeSuite(EnvBTreeShelveTestCase))
    suite.addTest(unittest.makeSuite(EnvHashShelveTestCase))
    suite.addTest(unittest.makeSuite(EnvThreadBTreeShelveTestCase))
    suite.addTest(unittest.makeSuite(EnvThreadHashShelveTestCase))
    suite.addTest(unittest.makeSuite(RecNoShelveTestCase))

    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
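Editor's note: a rough standard-library analogue of what dbshelve provides is the shelve module, which also persists pickled Python objects behind a dict-style interface, just on top of dbm rather than Berkeley DB. The sketch below is a comparison point, not a drop-in port of the deleted module.

# stdlib shelve sketch, illustration only.
import os, shelve, tempfile

path = os.path.join(tempfile.mkdtemp(), "demo-shelf")
s = shelve.open(path)
s['L'] = ['x'] * 10          # arbitrary picklable values, like populateDB above
assert s['L'] == ['x'] * 10
s.close()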
@@ -1,409 +0,0 @@
#!/usr/bin/env python
#
#-----------------------------------------------------------------------
# A test suite for the table interface built on bsddb.db
#-----------------------------------------------------------------------
#
# Copyright (C) 2000, 2001 by Autonomous Zone Industries
# Copyright (C) 2002 Gregory P. Smith
#
# March 20, 2000
#
# License:      This is free software.  You may use this software for any
#               purpose including modification/redistribution, so long as
#               this header remains intact and that you do not claim any
#               rights of ownership or authorship of this software.  This
#               software has been tested, but no warranty is expressed or
#               implied.
#
#   --  Gregory P. Smith <greg@krypto.org>
#
# $Id$

import os, re, sys

if sys.version_info[0] < 3 :
    try:
        import cPickle
        pickle = cPickle
    except ImportError:
        import pickle
else :
    import pickle

import unittest
from test_all import db, dbtables, test_support, verbose, \
        get_new_environment_path, get_new_database_path

#----------------------------------------------------------------------

class TableDBTestCase(unittest.TestCase):
    db_name = 'test-table.db'

    def setUp(self):
        import sys
        if sys.version_info[0] >= 3 :
            from test_all import do_proxy_db_py3k
            self._flag_proxy_db_py3k = do_proxy_db_py3k(False)

        self.testHomeDir = get_new_environment_path()
        self.tdb = dbtables.bsdTableDB(
            filename='tabletest.db', dbhome=self.testHomeDir, create=1)

    def tearDown(self):
        self.tdb.close()
        import sys
        if sys.version_info[0] >= 3 :
            from test_all import do_proxy_db_py3k
            do_proxy_db_py3k(self._flag_proxy_db_py3k)
        test_support.rmtree(self.testHomeDir)

    def test01(self):
        tabname = "test01"
        colname = 'cool numbers'
        try:
            self.tdb.Drop(tabname)
        except dbtables.TableDBError:
            pass
        self.tdb.CreateTable(tabname, [colname])
        import sys
        if sys.version_info[0] < 3 :
            self.tdb.Insert(tabname, {colname: pickle.dumps(3.14159, 1)})
        else :
            self.tdb.Insert(tabname, {colname: pickle.dumps(3.14159,
                1).decode("iso8859-1")})  # 8 bits

        if verbose:
            self.tdb._db_print()

        values = self.tdb.Select(
            tabname, [colname], conditions={colname: None})

        import sys
        if sys.version_info[0] < 3 :
            colval = pickle.loads(values[0][colname])
        else :
            colval = pickle.loads(bytes(values[0][colname], "iso8859-1"))
        self.assertTrue(colval > 3.141)
        self.assertTrue(colval < 3.142)
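Editor's note: the iso8859-1 dance in test01 works because pickle.dumps() returns bytes on Python 3, and latin-1 maps bytes 0-255 onto code points 0-255 one-to-one, so storing the pickle in a str column and decoding it back is lossless. A standalone Python 3 demonstration:

# Lossless bytes <-> str round trip via iso8859-1, illustration only.
import pickle
payload = pickle.dumps(3.14159, 1)            # bytes
as_text = payload.decode("iso8859-1")         # store in a text-valued column
assert pickle.loads(bytes(as_text, "iso8859-1")) == 3.14159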

    def test02(self):
        tabname = "test02"
        col0 = 'coolness factor'
        col1 = 'but can it fly?'
        col2 = 'Species'

        import sys
        if sys.version_info[0] < 3 :
            testinfo = [
                {col0: pickle.dumps(8, 1), col1: 'no', col2: 'Penguin'},
                {col0: pickle.dumps(-1, 1), col1: 'no', col2: 'Turkey'},
                {col0: pickle.dumps(9, 1), col1: 'yes', col2: 'SR-71A Blackbird'}
            ]
        else :
            testinfo = [
                {col0: pickle.dumps(8, 1).decode("iso8859-1"),
                    col1: 'no', col2: 'Penguin'},
                {col0: pickle.dumps(-1, 1).decode("iso8859-1"),
                    col1: 'no', col2: 'Turkey'},
                {col0: pickle.dumps(9, 1).decode("iso8859-1"),
                    col1: 'yes', col2: 'SR-71A Blackbird'}
            ]

        try:
            self.tdb.Drop(tabname)
        except dbtables.TableDBError:
            pass
        self.tdb.CreateTable(tabname, [col0, col1, col2])
        for row in testinfo :
            self.tdb.Insert(tabname, row)

        import sys
        if sys.version_info[0] < 3 :
            values = self.tdb.Select(tabname, [col2],
                conditions={col0: lambda x: pickle.loads(x) >= 8})
        else :
            values = self.tdb.Select(tabname, [col2],
                conditions={col0: lambda x:
                    pickle.loads(bytes(x, "iso8859-1")) >= 8})

        self.assertEqual(len(values), 2)
        if values[0]['Species'] == 'Penguin' :
            self.assertEqual(values[1]['Species'], 'SR-71A Blackbird')
        elif values[0]['Species'] == 'SR-71A Blackbird' :
            self.assertEqual(values[1]['Species'], 'Penguin')
        else :
            if verbose:
                print "values= %r" % (values,)
            raise RuntimeError("Wrong values returned!")

    def test03(self):
        tabname = "test03"
        try:
            self.tdb.Drop(tabname)
        except dbtables.TableDBError:
            pass
        if verbose:
            print '...before CreateTable...'
            self.tdb._db_print()
        self.tdb.CreateTable(tabname, ['a', 'b', 'c', 'd', 'e'])
        if verbose:
            print '...after CreateTable...'
            self.tdb._db_print()
        self.tdb.Drop(tabname)
        if verbose:
            print '...after Drop...'
            self.tdb._db_print()
        self.tdb.CreateTable(tabname, ['a', 'b', 'c', 'd', 'e'])

        try:
            self.tdb.Insert(tabname,
                {'a': "",
                 'e': pickle.dumps([{4:5, 6:7}, 'foo'], 1),
                 'f': "Zero"})
            self.fail('Expected an exception')
        except dbtables.TableDBError:
            pass

        try:
            self.tdb.Select(tabname, [], conditions={'foo': '123'})
            self.fail('Expected an exception')
        except dbtables.TableDBError:
            pass

        self.tdb.Insert(tabname,
                {'a': '42',
                 'b': "bad",
                 'c': "meep",
                 'e': 'Fuzzy wuzzy was a bear'})
        self.tdb.Insert(tabname,
                {'a': '581750',
                 'b': "good",
                 'd': "bla",
                 'c': "black",
                 'e': 'fuzzy was here'})
        self.tdb.Insert(tabname,
                {'a': '800000',
                 'b': "good",
                 'd': "bla",
                 'c': "black",
                 'e': 'Fuzzy wuzzy is a bear'})

        if verbose:
            self.tdb._db_print()

        # this should return two rows
        values = self.tdb.Select(tabname, ['b', 'a', 'd'],
            conditions={'e': re.compile('wuzzy').search,
                        'a': re.compile('^[0-9]+$').match})
        self.assertEqual(len(values), 2)

        # now let's delete one of them and try again
        self.tdb.Delete(tabname, conditions={'b': dbtables.ExactCond('good')})
        values = self.tdb.Select(
            tabname, ['a', 'd', 'b'],
            conditions={'e': dbtables.PrefixCond('Fuzzy')})
        self.assertEqual(len(values), 1)
        self.assertEqual(values[0]['d'], None)

        values = self.tdb.Select(tabname, ['b'],
            conditions={'c': lambda c: c == 'meep'})
        self.assertEqual(len(values), 1)
        self.assertEqual(values[0]['b'], "bad")

    def test04_MultiCondSelect(self):
        tabname = "test04_MultiCondSelect"
        try:
            self.tdb.Drop(tabname)
        except dbtables.TableDBError:
            pass
        self.tdb.CreateTable(tabname, ['a', 'b', 'c', 'd', 'e'])

        try:
            self.tdb.Insert(tabname,
                {'a': "",
                 'e': pickle.dumps([{4:5, 6:7}, 'foo'], 1),
                 'f': "Zero"})
            self.fail('Expected an exception')
        except dbtables.TableDBError:
            pass

        self.tdb.Insert(tabname, {'a': "A", 'b': "B", 'c': "C", 'd': "D",
                                  'e': "E"})
        self.tdb.Insert(tabname, {'a': "-A", 'b': "-B", 'c': "-C", 'd': "-D",
                                  'e': "-E"})
        self.tdb.Insert(tabname, {'a': "A-", 'b': "B-", 'c': "C-", 'd': "D-",
                                  'e': "E-"})

        if verbose:
            self.tdb._db_print()

        # This select should return 0 rows.  It is designed to test
        # the bug identified and fixed in SourceForge bug #590449.
        # (Big thanks to "Rob Tillotson (n9mtb)" for tracking this down
        # and supplying a fix!!  This one caused many headaches, to say
        # the least...)
        values = self.tdb.Select(tabname, ['b', 'a', 'd'],
            conditions={'e': dbtables.ExactCond('E'),
                        'a': dbtables.ExactCond('A'),
                        'd': dbtables.PrefixCond('-')
                       } )
        self.assertEqual(len(values), 0, values)

    def test_CreateOrExtend(self):
        tabname = "test_CreateOrExtend"

        self.tdb.CreateOrExtendTable(
            tabname, ['name', 'taste', 'filling', 'alcohol content', 'price'])
        try:
            self.tdb.Insert(tabname,
                {'taste': 'crap',
                 'filling': 'no',
                 'is it Guinness?': 'no'})
            self.fail("Insert should've failed due to bad column name")
        except:
            pass
        self.tdb.CreateOrExtendTable(tabname,
                ['name', 'taste', 'is it Guinness?'])

        # these should both succeed as the table should contain the union of both sets of columns.
        self.tdb.Insert(tabname, {'taste': 'crap', 'filling': 'no',
                                  'is it Guinness?': 'no'})
        self.tdb.Insert(tabname, {'taste': 'great', 'filling': 'yes',
                                  'is it Guinness?': 'yes',
                                  'name': 'Guinness'})

    def test_CondObjs(self):
        tabname = "test_CondObjs"

        self.tdb.CreateTable(tabname, ['a', 'b', 'c', 'd', 'e', 'p'])

        self.tdb.Insert(tabname, {'a': "the letter A",
                                  'b': "the letter B",
                                  'c': "is for cookie"})
        self.tdb.Insert(tabname, {'a': "is for aardvark",
                                  'e': "the letter E",
                                  'c': "is for cookie",
                                  'd': "is for dog"})
        self.tdb.Insert(tabname, {'a': "the letter A",
                                  'e': "the letter E",
                                  'c': "is for cookie",
                                  'p': "is for Python"})

        values = self.tdb.Select(
            tabname, ['p', 'e'],
            conditions={'e': dbtables.PrefixCond('the l')})
        self.assertEqual(len(values), 2, values)
        self.assertEqual(values[0]['e'], values[1]['e'], values)
        self.assertNotEqual(values[0]['p'], values[1]['p'], values)

        values = self.tdb.Select(
            tabname, ['d', 'a'],
            conditions={'a': dbtables.LikeCond('%aardvark%')})
        self.assertEqual(len(values), 1, values)
        self.assertEqual(values[0]['d'], "is for dog", values)
        self.assertEqual(values[0]['a'], "is for aardvark", values)

        values = self.tdb.Select(tabname, None,
                {'b': dbtables.Cond(),
                 'e': dbtables.LikeCond('%letter%'),
                 'a': dbtables.PrefixCond('is'),
                 'd': dbtables.ExactCond('is for dog'),
                 'c': dbtables.PrefixCond('is for'),
                 'p': lambda s: not s})
        self.assertEqual(len(values), 1, values)
        self.assertEqual(values[0]['d'], "is for dog", values)
        self.assertEqual(values[0]['a'], "is for aardvark", values)

    def test_Delete(self):
        tabname = "test_Delete"
        self.tdb.CreateTable(tabname, ['x', 'y', 'z'])

        # prior to 2001-05-09 there was a bug where Delete() would
        # fail if it encountered any rows that did not have values in
        # every column.
        # Hunted and Squashed by <Donwulff> (Jukka Santala - donwulff@nic.fi)
        self.tdb.Insert(tabname, {'x': 'X1', 'y': 'Y1'})
        self.tdb.Insert(tabname, {'x': 'X2', 'y': 'Y2', 'z': 'Z2'})

        self.tdb.Delete(tabname, conditions={'x': dbtables.PrefixCond('X')})
        values = self.tdb.Select(tabname, ['y'],
                conditions={'x': dbtables.PrefixCond('X')})
        self.assertEqual(len(values), 0)

    def test_Modify(self):
        tabname = "test_Modify"
        self.tdb.CreateTable(tabname, ['Name', 'Type', 'Access'])

        self.tdb.Insert(tabname, {'Name': 'Index to MP3 files.doc',
                                  'Type': 'Word', 'Access': '8'})
        self.tdb.Insert(tabname, {'Name': 'Nifty.MP3', 'Access': '1'})
        self.tdb.Insert(tabname, {'Type': 'Unknown', 'Access': '0'})

        def set_type(type):
            if type is None:
                return 'MP3'
            return type

        def increment_access(count):
            return str(int(count)+1)

        def remove_value(value):
            return None

        self.tdb.Modify(tabname,
                conditions={'Access': dbtables.ExactCond('0')},
                mappings={'Access': remove_value})
        self.tdb.Modify(tabname,
                conditions={'Name': dbtables.LikeCond('%MP3%')},
                mappings={'Type': set_type})
        self.tdb.Modify(tabname,
                conditions={'Name': dbtables.LikeCond('%')},
                mappings={'Access': increment_access})

        try:
            self.tdb.Modify(tabname,
                    conditions={'Name': dbtables.LikeCond('%')},
                    mappings={'Access': 'What is your quest?'})
        except TypeError:
            # success, the string value in mappings isn't callable
            pass
        else:
            raise RuntimeError, "why was TypeError not raised for bad callable?"

        # Delete key in select conditions
        values = self.tdb.Select(
            tabname, None,
            conditions={'Type': dbtables.ExactCond('Unknown')})
        self.assertEqual(len(values), 1, values)
        self.assertEqual(values[0]['Name'], None, values)
        self.assertEqual(values[0]['Access'], None, values)

        # Modify value by select conditions
        values = self.tdb.Select(
            tabname, None,
            conditions={'Name': dbtables.ExactCond('Nifty.MP3')})
        self.assertEqual(len(values), 1, values)
        self.assertEqual(values[0]['Type'], "MP3", values)
        self.assertEqual(values[0]['Access'], "2", values)

        # Make sure change applied only to select conditions
        values = self.tdb.Select(
            tabname, None, conditions={'Name': dbtables.LikeCond('%doc%')})
        self.assertEqual(len(values), 1, values)
|
||||
self.assertEqual(values[0]['Type'], "Word", values)
|
||||
self.assertEqual(values[0]['Access'], "9", values)
|
||||
|
||||
|
||||
def test_suite():
|
||||
suite = unittest.TestSuite()
|
||||
suite.addTest(unittest.makeSuite(TableDBTestCase))
|
||||
return suite
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main(defaultTest='test_suite')
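
For reference, the Select/Insert machinery exercised above reduces to a few calls. A minimal sketch (assuming the bsddb `dbtables` module from this package and a writable current directory; the table and file names are placeholders):

from bsddb import dbtables

# create=1 makes the backing file on first use (hypothetical names)
tdb = dbtables.bsdTableDB(filename='tables.db', dbhome='.', create=1)
tdb.CreateTable('demo', ['name', 'color'])
tdb.Insert('demo', {'name': 'apple', 'color': 'red'})
# conditions map column names to matcher objects, or to any callable
rows = tdb.Select('demo', ['name'],
                  conditions={'color': dbtables.ExactCond('red')})
assert rows[0]['name'] == 'apple'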

@ -1,152 +0,0 @@
"""TestCases for distributed transactions.
|
||||
"""
|
||||
|
||||
import os
|
||||
import unittest
|
||||
|
||||
from test_all import db, test_support, get_new_environment_path, \
|
||||
get_new_database_path
|
||||
|
||||
from test_all import verbose
|
||||
|
||||
#----------------------------------------------------------------------
|
||||
|
||||
class DBTxn_distributed(unittest.TestCase):
|
||||
num_txns=1234
|
||||
nosync=True
|
||||
must_open_db=False
|
||||
def _create_env(self, must_open_db) :
|
||||
self.dbenv = db.DBEnv()
|
||||
self.dbenv.set_tx_max(self.num_txns)
|
||||
self.dbenv.set_lk_max_lockers(self.num_txns*2)
|
||||
self.dbenv.set_lk_max_locks(self.num_txns*2)
|
||||
self.dbenv.set_lk_max_objects(self.num_txns*2)
|
||||
if self.nosync :
|
||||
self.dbenv.set_flags(db.DB_TXN_NOSYNC,True)
|
||||
self.dbenv.open(self.homeDir, db.DB_CREATE | db.DB_THREAD |
|
||||
db.DB_RECOVER |
|
||||
db.DB_INIT_TXN | db.DB_INIT_LOG | db.DB_INIT_MPOOL |
|
||||
db.DB_INIT_LOCK, 0666)
|
||||
self.db = db.DB(self.dbenv)
|
||||
self.db.set_re_len(db.DB_GID_SIZE)
|
||||
if must_open_db :
|
||||
txn=self.dbenv.txn_begin()
|
||||
self.db.open(self.filename,
|
||||
db.DB_QUEUE, db.DB_CREATE | db.DB_THREAD, 0666,
|
||||
txn=txn)
|
||||
txn.commit()
|
||||
|
||||
def setUp(self) :
|
||||
self.homeDir = get_new_environment_path()
|
||||
self.filename = "test"
|
||||
return self._create_env(must_open_db=True)
|
||||
|
||||
def _destroy_env(self):
|
||||
if self.nosync or (db.version()[:2] == (4,6)): # Known bug
|
||||
self.dbenv.log_flush()
|
||||
self.db.close()
|
||||
self.dbenv.close()
|
||||
|
||||
def tearDown(self):
|
||||
self._destroy_env()
|
||||
test_support.rmtree(self.homeDir)
|
||||
|
||||
def _recreate_env(self,must_open_db) :
|
||||
self._destroy_env()
|
||||
self._create_env(must_open_db)
|
||||
|
||||
def test01_distributed_transactions(self) :
|
||||
txns=set()
|
||||
adapt = lambda x : x
|
||||
import sys
|
||||
if sys.version_info[0] >= 3 :
|
||||
adapt = lambda x : bytes(x, "ascii")
|
||||
# Create transactions, "prepare" them, and
|
||||
# let them be garbage collected.
|
||||
for i in xrange(self.num_txns) :
|
||||
txn = self.dbenv.txn_begin()
|
||||
gid = "%%%dd" %db.DB_GID_SIZE
|
||||
gid = adapt(gid %i)
|
||||
self.db.put(i, gid, txn=txn, flags=db.DB_APPEND)
|
||||
txns.add(gid)
|
||||
txn.prepare(gid)
|
||||
del txn
|
||||
|
||||
self._recreate_env(self.must_open_db)
|
||||
|
||||
# Get "to be recovered" transactions but
|
||||
# let them be garbage collected.
|
||||
recovered_txns=self.dbenv.txn_recover()
|
||||
self.assertEqual(self.num_txns,len(recovered_txns))
|
||||
for gid,txn in recovered_txns :
|
||||
self.assertTrue(gid in txns)
|
||||
del txn
|
||||
del recovered_txns
|
||||
|
||||
self._recreate_env(self.must_open_db)
|
||||
|
||||
# Get "to be recovered" transactions. Commit, abort and
|
||||
# discard them.
|
||||
recovered_txns=self.dbenv.txn_recover()
|
||||
self.assertEqual(self.num_txns,len(recovered_txns))
|
||||
discard_txns=set()
|
||||
committed_txns=set()
|
||||
state=0
|
||||
for gid,txn in recovered_txns :
|
||||
if state==0 or state==1:
|
||||
committed_txns.add(gid)
|
||||
txn.commit()
|
||||
elif state==2 :
|
||||
txn.abort()
|
||||
elif state==3 :
|
||||
txn.discard()
|
||||
discard_txns.add(gid)
|
||||
state=-1
|
||||
state+=1
|
||||
del txn
|
||||
del recovered_txns
|
||||
|
||||
self._recreate_env(self.must_open_db)
|
||||
|
||||
# Verify the discarded transactions are still
|
||||
# around, and dispose them.
|
||||
recovered_txns=self.dbenv.txn_recover()
|
||||
self.assertEqual(len(discard_txns),len(recovered_txns))
|
||||
for gid,txn in recovered_txns :
|
||||
txn.abort()
|
||||
del txn
|
||||
del recovered_txns
|
||||
|
||||
self._recreate_env(must_open_db=True)
|
||||
|
||||
# Be sure there are not pending transactions.
|
||||
# Check also database size.
|
||||
recovered_txns=self.dbenv.txn_recover()
|
||||
self.assertTrue(len(recovered_txns)==0)
|
||||
self.assertEqual(len(committed_txns),self.db.stat()["nkeys"])
|
||||
|
||||
class DBTxn_distributedSYNC(DBTxn_distributed):
|
||||
nosync=False
|
||||
|
||||
class DBTxn_distributed_must_open_db(DBTxn_distributed):
|
||||
must_open_db=True
|
||||
|
||||
class DBTxn_distributedSYNC_must_open_db(DBTxn_distributed):
|
||||
nosync=False
|
||||
must_open_db=True
|
||||
|
||||
#----------------------------------------------------------------------
|
||||
|
||||
def test_suite():
|
||||
suite = unittest.TestSuite()
|
||||
if db.version() >= (4,5) :
|
||||
suite.addTest(unittest.makeSuite(DBTxn_distributed))
|
||||
suite.addTest(unittest.makeSuite(DBTxn_distributedSYNC))
|
||||
if db.version() >= (4,6) :
|
||||
suite.addTest(unittest.makeSuite(DBTxn_distributed_must_open_db))
|
||||
suite.addTest(unittest.makeSuite(DBTxn_distributedSYNC_must_open_db))
|
||||
return suite
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main(defaultTest='test_suite')
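
The test above is the whole two-phase-commit dance: prepare under a global ID, lose the handles, recover, then resolve. A minimal sketch of one such transaction (assuming a transactional `dbenv` and a queue DB `data` opened inside it, as in the test case; the GID value is a placeholder):

# phase 1: the GID must be exactly db.DB_GID_SIZE bytes
gid = "global-id-000001".ljust(db.DB_GID_SIZE)
txn = dbenv.txn_begin()
data.put(1, gid, txn=txn, flags=db.DB_APPEND)
txn.prepare(gid)           # from here the txn survives a crash
del txn                    # e.g. the process dies before committing

# after recovery, a coordinator resolves the in-doubt transaction
for recovered_gid, recovered_txn in dbenv.txn_recover():
    if recovered_gid == gid:
        recovered_txn.commit()       # phase 2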

@ -1,215 +0,0 @@
"""TestCases for checking that it does not segfault when a DBEnv object
|
||||
is closed before its DB objects.
|
||||
"""
|
||||
|
||||
import os, sys
|
||||
import unittest
|
||||
|
||||
from test_all import db, test_support, verbose, get_new_environment_path, get_new_database_path
|
||||
|
||||
# We're going to get warnings in this module about trying to close the db when
|
||||
# its env is already closed. Let's just ignore those.
|
||||
try:
|
||||
import warnings
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
warnings.filterwarnings('ignore',
|
||||
message='DB could not be closed in',
|
||||
category=RuntimeWarning)
|
||||
|
||||
|
||||
#----------------------------------------------------------------------
|
||||
|
||||
class DBEnvClosedEarlyCrash(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.homeDir = get_new_environment_path()
|
||||
self.filename = "test"
|
||||
|
||||
def tearDown(self):
|
||||
test_support.rmtree(self.homeDir)
|
||||
|
||||
def test01_close_dbenv_before_db(self):
|
||||
dbenv = db.DBEnv()
|
||||
dbenv.open(self.homeDir,
|
||||
db.DB_INIT_CDB| db.DB_CREATE |db.DB_THREAD|db.DB_INIT_MPOOL,
|
||||
0666)
|
||||
|
||||
d = db.DB(dbenv)
|
||||
d2 = db.DB(dbenv)
|
||||
d.open(self.filename, db.DB_BTREE, db.DB_CREATE | db.DB_THREAD, 0666)
|
||||
|
||||
self.assertRaises(db.DBNoSuchFileError, d2.open,
|
||||
self.filename+"2", db.DB_BTREE, db.DB_THREAD, 0666)
|
||||
|
||||
d.put("test","this is a test")
|
||||
self.assertEqual(d.get("test"), "this is a test", "put!=get")
|
||||
dbenv.close() # This "close" should close the child db handle also
|
||||
self.assertRaises(db.DBError, d.get, "test")
|
||||
|
||||
def test02_close_dbenv_before_dbcursor(self):
|
||||
dbenv = db.DBEnv()
|
||||
dbenv.open(self.homeDir,
|
||||
db.DB_INIT_CDB| db.DB_CREATE |db.DB_THREAD|db.DB_INIT_MPOOL,
|
||||
0666)
|
||||
|
||||
d = db.DB(dbenv)
|
||||
d.open(self.filename, db.DB_BTREE, db.DB_CREATE | db.DB_THREAD, 0666)
|
||||
|
||||
d.put("test","this is a test")
|
||||
d.put("test2","another test")
|
||||
d.put("test3","another one")
|
||||
self.assertEqual(d.get("test"), "this is a test", "put!=get")
|
||||
c=d.cursor()
|
||||
c.first()
|
||||
c.next()
|
||||
d.close() # This "close" should close the child db handle also
|
||||
# db.close should close the child cursor
|
||||
self.assertRaises(db.DBError,c.next)
|
||||
|
||||
d = db.DB(dbenv)
|
||||
d.open(self.filename, db.DB_BTREE, db.DB_CREATE | db.DB_THREAD, 0666)
|
||||
c=d.cursor()
|
||||
c.first()
|
||||
c.next()
|
||||
dbenv.close()
|
||||
# The "close" should close the child db handle also, with cursors
|
||||
self.assertRaises(db.DBError, c.next)
|
||||
|
||||
def test03_close_db_before_dbcursor_without_env(self):
|
||||
import os.path
|
||||
path=os.path.join(self.homeDir,self.filename)
|
||||
d = db.DB()
|
||||
d.open(path, db.DB_BTREE, db.DB_CREATE | db.DB_THREAD, 0666)
|
||||
|
||||
d.put("test","this is a test")
|
||||
d.put("test2","another test")
|
||||
d.put("test3","another one")
|
||||
self.assertEqual(d.get("test"), "this is a test", "put!=get")
|
||||
c=d.cursor()
|
||||
c.first()
|
||||
c.next()
|
||||
d.close()
|
||||
# The "close" should close the child db handle also
|
||||
self.assertRaises(db.DBError, c.next)
|
||||
|
||||
def test04_close_massive(self):
|
||||
dbenv = db.DBEnv()
|
||||
dbenv.open(self.homeDir,
|
||||
db.DB_INIT_CDB| db.DB_CREATE |db.DB_THREAD|db.DB_INIT_MPOOL,
|
||||
0666)
|
||||
|
||||
dbs=[db.DB(dbenv) for i in xrange(16)]
|
||||
cursors=[]
|
||||
for i in dbs :
|
||||
i.open(self.filename, db.DB_BTREE, db.DB_CREATE | db.DB_THREAD, 0666)
|
||||
|
||||
dbs[10].put("test","this is a test")
|
||||
dbs[10].put("test2","another test")
|
||||
dbs[10].put("test3","another one")
|
||||
self.assertEqual(dbs[4].get("test"), "this is a test", "put!=get")
|
||||
|
||||
for i in dbs :
|
||||
cursors.extend([i.cursor() for j in xrange(32)])
|
||||
|
||||
for i in dbs[::3] :
|
||||
i.close()
|
||||
for i in cursors[::3] :
|
||||
i.close()
|
||||
|
||||
# Check for missing exception in DB! (after DB close)
|
||||
self.assertRaises(db.DBError, dbs[9].get, "test")
|
||||
|
||||
# Check for missing exception in DBCursor! (after DB close)
|
||||
self.assertRaises(db.DBError, cursors[101].first)
|
||||
|
||||
cursors[80].first()
|
||||
cursors[80].next()
|
||||
dbenv.close() # This "close" should close the child db handle also
|
||||
# Check for missing exception! (after DBEnv close)
|
||||
self.assertRaises(db.DBError, cursors[80].next)
|
||||
|
||||
def test05_close_dbenv_delete_db_success(self):
|
||||
dbenv = db.DBEnv()
|
||||
dbenv.open(self.homeDir,
|
||||
db.DB_INIT_CDB| db.DB_CREATE |db.DB_THREAD|db.DB_INIT_MPOOL,
|
||||
0666)
|
||||
|
||||
d = db.DB(dbenv)
|
||||
d.open(self.filename, db.DB_BTREE, db.DB_CREATE | db.DB_THREAD, 0666)
|
||||
|
||||
dbenv.close() # This "close" should close the child db handle also
|
||||
|
||||
del d
|
||||
try:
|
||||
import gc
|
||||
except ImportError:
|
||||
gc = None
|
||||
if gc:
|
||||
# force d.__del__ [DB_dealloc] to be called
|
||||
gc.collect()
|
||||
|
||||
def test06_close_txn_before_dup_cursor(self) :
|
||||
dbenv = db.DBEnv()
|
||||
dbenv.open(self.homeDir,db.DB_INIT_TXN | db.DB_INIT_MPOOL |
|
||||
db.DB_INIT_LOG | db.DB_CREATE)
|
||||
d = db.DB(dbenv)
|
||||
txn = dbenv.txn_begin()
|
||||
d.open(self.filename, dbtype = db.DB_HASH, flags = db.DB_CREATE,
|
||||
txn=txn)
|
||||
d.put("XXX", "yyy", txn=txn)
|
||||
txn.commit()
|
||||
txn = dbenv.txn_begin()
|
||||
c1 = d.cursor(txn)
|
||||
c2 = c1.dup()
|
||||
self.assertEqual(("XXX", "yyy"), c1.first())
|
||||
|
||||
# Not interested in warnings about implicit close.
|
||||
import warnings
|
||||
if sys.version_info < (2, 6) :
|
||||
# Completely resetting the warning state is
|
||||
# problematic with python >=2.6 with -3 (py3k warning),
|
||||
# because some stdlib modules selectively ignores warnings.
|
||||
warnings.simplefilter("ignore")
|
||||
txn.commit()
|
||||
warnings.resetwarnings()
|
||||
else :
|
||||
# When we drop support for python 2.4
|
||||
# we could use: (in 2.5 we need a __future__ statement)
|
||||
#
|
||||
# with warnings.catch_warnings():
|
||||
# warnings.simplefilter("ignore")
|
||||
# txn.commit()
|
||||
#
|
||||
# We can not use "with" as is, because it would be invalid syntax
|
||||
# in python 2.4 and (with no __future__) 2.5.
|
||||
# Here we simulate "with" following PEP 343 :
|
||||
w = warnings.catch_warnings()
|
||||
w.__enter__()
|
||||
try :
|
||||
warnings.simplefilter("ignore")
|
||||
txn.commit()
|
||||
finally :
|
||||
w.__exit__()
|
||||
|
||||
self.assertRaises(db.DBCursorClosedError, c2.first)
|
||||
|
||||
def test07_close_db_before_sequence(self):
|
||||
import os.path
|
||||
path=os.path.join(self.homeDir,self.filename)
|
||||
d = db.DB()
|
||||
d.open(path, db.DB_BTREE, db.DB_CREATE | db.DB_THREAD, 0666)
|
||||
dbs=db.DBSequence(d)
|
||||
d.close() # This "close" should close the child DBSequence also
|
||||
dbs.close() # If not closed, core dump (in Berkeley DB 4.6.*)
|
||||
|
||||
#----------------------------------------------------------------------
|
||||
|
||||
def test_suite():
|
||||
suite = unittest.TestSuite()
|
||||
suite.addTest(unittest.makeSuite(DBEnvClosedEarlyCrash))
|
||||
return suite
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main(defaultTest='test_suite')
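
The crashes these tests guard against all come from using a child handle after its parent is gone. The safe explicit ordering is the mirror image; a minimal sketch (assuming a writable `home_dir`; names are placeholders):

env = db.DBEnv()
env.open(home_dir, db.DB_CREATE | db.DB_INIT_MPOOL)
d = db.DB(env)
d.open("demo.db", db.DB_BTREE, db.DB_CREATE)
c = d.cursor()
try:
    c.first()
finally:
    c.close()    # cursors first...
    d.close()    # ...then the database...
    env.close()  # ...and the environment last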

@ -1,61 +0,0 @@
"""TestCase for reseting File ID.
|
||||
"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import unittest
|
||||
|
||||
from test_all import db, test_support, get_new_environment_path, get_new_database_path
|
||||
|
||||
class FileidResetTestCase(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.db_path_1 = get_new_database_path()
|
||||
self.db_path_2 = get_new_database_path()
|
||||
self.db_env_path = get_new_environment_path()
|
||||
|
||||
def test_fileid_reset(self):
|
||||
# create DB 1
|
||||
self.db1 = db.DB()
|
||||
self.db1.open(self.db_path_1, dbtype=db.DB_HASH, flags=(db.DB_CREATE|db.DB_EXCL))
|
||||
self.db1.put('spam', 'eggs')
|
||||
self.db1.close()
|
||||
|
||||
shutil.copy(self.db_path_1, self.db_path_2)
|
||||
|
||||
self.db2 = db.DB()
|
||||
self.db2.open(self.db_path_2, dbtype=db.DB_HASH)
|
||||
self.db2.put('spam', 'spam')
|
||||
self.db2.close()
|
||||
|
||||
self.db_env = db.DBEnv()
|
||||
self.db_env.open(self.db_env_path, db.DB_CREATE|db.DB_INIT_MPOOL)
|
||||
|
||||
# use fileid_reset() here
|
||||
self.db_env.fileid_reset(self.db_path_2)
|
||||
|
||||
self.db1 = db.DB(self.db_env)
|
||||
self.db1.open(self.db_path_1, dbtype=db.DB_HASH, flags=db.DB_RDONLY)
|
||||
self.assertEqual(self.db1.get('spam'), 'eggs')
|
||||
|
||||
self.db2 = db.DB(self.db_env)
|
||||
self.db2.open(self.db_path_2, dbtype=db.DB_HASH, flags=db.DB_RDONLY)
|
||||
self.assertEqual(self.db2.get('spam'), 'spam')
|
||||
|
||||
self.db1.close()
|
||||
self.db2.close()
|
||||
|
||||
self.db_env.close()
|
||||
|
||||
def tearDown(self):
|
||||
test_support.unlink(self.db_path_1)
|
||||
test_support.unlink(self.db_path_2)
|
||||
test_support.rmtree(self.db_env_path)
|
||||
|
||||
def test_suite():
|
||||
suite = unittest.TestSuite()
|
||||
if db.version() >= (4, 4):
|
||||
suite.addTest(unittest.makeSuite(FileidResetTestCase))
|
||||
return suite
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main(defaultTest='test_suite')
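
Background for the test above: Berkeley DB stamps each database file with a unique internal file ID, and the shared cache identifies files by that ID, so a byte-for-byte copy opened in the same environment would alias the original. fileid_reset() stamps the copy with a fresh ID. A minimal sketch (paths and `env_home` are placeholders):

import shutil
shutil.copy('one.db', 'two.db')   # the copy still carries one.db's ID
env = db.DBEnv()
env.open(env_home, db.DB_CREATE | db.DB_INIT_MPOOL)
env.fileid_reset('two.db')        # now both files may live in this env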

@ -1,92 +0,0 @@
"""
|
||||
TestCases for checking set_get_returns_none.
|
||||
"""
|
||||
|
||||
import os, string
|
||||
import unittest
|
||||
|
||||
from test_all import db, verbose, get_new_database_path
|
||||
|
||||
|
||||
#----------------------------------------------------------------------
|
||||
|
||||
class GetReturnsNoneTestCase(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.filename = get_new_database_path()
|
||||
|
||||
def tearDown(self):
|
||||
try:
|
||||
os.remove(self.filename)
|
||||
except os.error:
|
||||
pass
|
||||
|
||||
|
||||
def test01_get_returns_none(self):
|
||||
d = db.DB()
|
||||
d.open(self.filename, db.DB_BTREE, db.DB_CREATE)
|
||||
d.set_get_returns_none(1)
|
||||
|
||||
for x in string.letters:
|
||||
d.put(x, x * 40)
|
||||
|
||||
data = d.get('bad key')
|
||||
self.assertEqual(data, None)
|
||||
|
||||
data = d.get(string.letters[0])
|
||||
self.assertEqual(data, string.letters[0]*40)
|
||||
|
||||
count = 0
|
||||
c = d.cursor()
|
||||
rec = c.first()
|
||||
while rec:
|
||||
count = count + 1
|
||||
rec = c.next()
|
||||
|
||||
self.assertEqual(rec, None)
|
||||
self.assertEqual(count, len(string.letters))
|
||||
|
||||
c.close()
|
||||
d.close()
|
||||
|
||||
|
||||
def test02_get_raises_exception(self):
|
||||
d = db.DB()
|
||||
d.open(self.filename, db.DB_BTREE, db.DB_CREATE)
|
||||
d.set_get_returns_none(0)
|
||||
|
||||
for x in string.letters:
|
||||
d.put(x, x * 40)
|
||||
|
||||
self.assertRaises(db.DBNotFoundError, d.get, 'bad key')
|
||||
self.assertRaises(KeyError, d.get, 'bad key')
|
||||
|
||||
data = d.get(string.letters[0])
|
||||
self.assertEqual(data, string.letters[0]*40)
|
||||
|
||||
count = 0
|
||||
exceptionHappened = 0
|
||||
c = d.cursor()
|
||||
rec = c.first()
|
||||
while rec:
|
||||
count = count + 1
|
||||
try:
|
||||
rec = c.next()
|
||||
except db.DBNotFoundError: # end of the records
|
||||
exceptionHappened = 1
|
||||
break
|
||||
|
||||
self.assertNotEqual(rec, None)
|
||||
self.assertTrue(exceptionHappened)
|
||||
self.assertEqual(count, len(string.letters))
|
||||
|
||||
c.close()
|
||||
d.close()
|
||||
|
||||
#----------------------------------------------------------------------
|
||||
|
||||
def test_suite():
|
||||
return unittest.makeSuite(GetReturnsNoneTestCase)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main(defaultTest='test_suite')
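
In short, set_get_returns_none() trades exceptions for None returns on missing keys: 0 raises (dict-like), 1 makes DB.get() return None, and 2 makes cursor methods return None at the end of the data as well. A minimal sketch (assuming a writable `db_path` placeholder):

d = db.DB()
d.open(db_path, db.DB_BTREE, db.DB_CREATE)
d.set_get_returns_none(1)
assert d.get('missing') is None     # mode 1: no exception
d.set_get_returns_none(0)
try:
    d.get('missing')
except db.DBNotFoundError:
    pass                            # mode 0: raises instead
d.close()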

@ -1,99 +0,0 @@
"""TestCases for using the DB.join and DBCursor.join_item methods.
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
import unittest
|
||||
|
||||
from test_all import db, dbshelve, test_support, verbose, \
|
||||
get_new_environment_path, get_new_database_path
|
||||
|
||||
#----------------------------------------------------------------------
|
||||
|
||||
ProductIndex = [
|
||||
('apple', "Convenience Store"),
|
||||
('blueberry', "Farmer's Market"),
|
||||
('shotgun', "S-Mart"), # Aisle 12
|
||||
('pear', "Farmer's Market"),
|
||||
('chainsaw', "S-Mart"), # "Shop smart. Shop S-Mart!"
|
||||
('strawberry', "Farmer's Market"),
|
||||
]
|
||||
|
||||
ColorIndex = [
|
||||
('blue', "blueberry"),
|
||||
('red', "apple"),
|
||||
('red', "chainsaw"),
|
||||
('red', "strawberry"),
|
||||
('yellow', "peach"),
|
||||
('yellow', "pear"),
|
||||
('black', "shotgun"),
|
||||
]
|
||||
|
||||
class JoinTestCase(unittest.TestCase):
|
||||
keytype = ''
|
||||
|
||||
def setUp(self):
|
||||
self.filename = self.__class__.__name__ + '.db'
|
||||
self.homeDir = get_new_environment_path()
|
||||
self.env = db.DBEnv()
|
||||
self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL | db.DB_INIT_LOCK )
|
||||
|
||||
def tearDown(self):
|
||||
self.env.close()
|
||||
test_support.rmtree(self.homeDir)
|
||||
|
||||
def test01_join(self):
|
||||
if verbose:
|
||||
print '\n', '-=' * 30
|
||||
print "Running %s.test01_join..." % \
|
||||
self.__class__.__name__
|
||||
|
||||
# create and populate primary index
|
||||
priDB = db.DB(self.env)
|
||||
priDB.open(self.filename, "primary", db.DB_BTREE, db.DB_CREATE)
|
||||
map(lambda t, priDB=priDB: priDB.put(*t), ProductIndex)
|
||||
|
||||
# create and populate secondary index
|
||||
secDB = db.DB(self.env)
|
||||
secDB.set_flags(db.DB_DUP | db.DB_DUPSORT)
|
||||
secDB.open(self.filename, "secondary", db.DB_BTREE, db.DB_CREATE)
|
||||
map(lambda t, secDB=secDB: secDB.put(*t), ColorIndex)
|
||||
|
||||
sCursor = None
|
||||
jCursor = None
|
||||
try:
|
||||
# lets look up all of the red Products
|
||||
sCursor = secDB.cursor()
|
||||
# Don't do the .set() in an assert, or you can get a bogus failure
|
||||
# when running python -O
|
||||
tmp = sCursor.set('red')
|
||||
self.assertTrue(tmp)
|
||||
|
||||
# FIXME: jCursor doesn't properly hold a reference to its
|
||||
# cursors, if they are closed before jcursor is used it
|
||||
# can cause a crash.
|
||||
jCursor = priDB.join([sCursor])
|
||||
|
||||
if jCursor.get(0) != ('apple', "Convenience Store"):
|
||||
self.fail("join cursor positioned wrong")
|
||||
if jCursor.join_item() != 'chainsaw':
|
||||
self.fail("DBCursor.join_item returned wrong item")
|
||||
if jCursor.get(0)[0] != 'strawberry':
|
||||
self.fail("join cursor returned wrong thing")
|
||||
if jCursor.get(0): # there were only three red items to return
|
||||
self.fail("join cursor returned too many items")
|
||||
finally:
|
||||
if jCursor:
|
||||
jCursor.close()
|
||||
if sCursor:
|
||||
sCursor.close()
|
||||
priDB.close()
|
||||
secDB.close()
|
||||
|
||||
|
||||
def test_suite():
|
||||
suite = unittest.TestSuite()
|
||||
|
||||
suite.addTest(unittest.makeSuite(JoinTestCase))
|
||||
|
||||
return suite
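
The join idiom in test01_join, reduced to its core: position one cursor per secondary index on the wanted value, then let the primary database intersect them. A minimal sketch (assuming `priDB` and `secDB` handles set up as in the test):

sc = secDB.cursor()
sc.set('red')              # every record whose color is 'red'
jc = priDB.join([sc])      # join cursor over the intersection
pair = jc.get(0)           # next matching (key, data) pair, or None
jc.close()
sc.close()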

@ -1,184 +0,0 @@
"""
|
||||
TestCases for testing the locking sub-system.
|
||||
"""
|
||||
|
||||
import time
|
||||
|
||||
import unittest
|
||||
from test_all import db, test_support, verbose, have_threads, \
|
||||
get_new_environment_path, get_new_database_path
|
||||
|
||||
if have_threads :
|
||||
from threading import Thread
|
||||
import sys
|
||||
if sys.version_info[0] < 3 :
|
||||
from threading import currentThread
|
||||
else :
|
||||
from threading import current_thread as currentThread
|
||||
|
||||
#----------------------------------------------------------------------
|
||||
|
||||
class LockingTestCase(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.homeDir = get_new_environment_path()
|
||||
self.env = db.DBEnv()
|
||||
self.env.open(self.homeDir, db.DB_THREAD | db.DB_INIT_MPOOL |
|
||||
db.DB_INIT_LOCK | db.DB_CREATE)
|
||||
|
||||
|
||||
def tearDown(self):
|
||||
self.env.close()
|
||||
test_support.rmtree(self.homeDir)
|
||||
|
||||
|
||||
def test01_simple(self):
|
||||
if verbose:
|
||||
print '\n', '-=' * 30
|
||||
print "Running %s.test01_simple..." % self.__class__.__name__
|
||||
|
||||
anID = self.env.lock_id()
|
||||
if verbose:
|
||||
print "locker ID: %s" % anID
|
||||
lock = self.env.lock_get(anID, "some locked thing", db.DB_LOCK_WRITE)
|
||||
if verbose:
|
||||
print "Aquired lock: %s" % lock
|
||||
self.env.lock_put(lock)
|
||||
if verbose:
|
||||
print "Released lock: %s" % lock
|
||||
self.env.lock_id_free(anID)
|
||||
|
||||
|
||||
def test02_threaded(self):
|
||||
if verbose:
|
||||
print '\n', '-=' * 30
|
||||
print "Running %s.test02_threaded..." % self.__class__.__name__
|
||||
|
||||
threads = []
|
||||
threads.append(Thread(target = self.theThread,
|
||||
args=(db.DB_LOCK_WRITE,)))
|
||||
threads.append(Thread(target = self.theThread,
|
||||
args=(db.DB_LOCK_READ,)))
|
||||
threads.append(Thread(target = self.theThread,
|
||||
args=(db.DB_LOCK_READ,)))
|
||||
threads.append(Thread(target = self.theThread,
|
||||
args=(db.DB_LOCK_WRITE,)))
|
||||
threads.append(Thread(target = self.theThread,
|
||||
args=(db.DB_LOCK_READ,)))
|
||||
threads.append(Thread(target = self.theThread,
|
||||
args=(db.DB_LOCK_READ,)))
|
||||
threads.append(Thread(target = self.theThread,
|
||||
args=(db.DB_LOCK_WRITE,)))
|
||||
threads.append(Thread(target = self.theThread,
|
||||
args=(db.DB_LOCK_WRITE,)))
|
||||
threads.append(Thread(target = self.theThread,
|
||||
args=(db.DB_LOCK_WRITE,)))
|
||||
|
||||
for t in threads:
|
||||
import sys
|
||||
if sys.version_info[0] < 3 :
|
||||
t.setDaemon(True)
|
||||
else :
|
||||
t.daemon = True
|
||||
t.start()
|
||||
for t in threads:
|
||||
t.join()
|
||||
|
||||
def test03_lock_timeout(self):
|
||||
self.env.set_timeout(0, db.DB_SET_LOCK_TIMEOUT)
|
||||
self.assertEqual(self.env.get_timeout(db.DB_SET_LOCK_TIMEOUT), 0)
|
||||
self.env.set_timeout(0, db.DB_SET_TXN_TIMEOUT)
|
||||
self.assertEqual(self.env.get_timeout(db.DB_SET_TXN_TIMEOUT), 0)
|
||||
self.env.set_timeout(123456, db.DB_SET_LOCK_TIMEOUT)
|
||||
self.assertEqual(self.env.get_timeout(db.DB_SET_LOCK_TIMEOUT), 123456)
|
||||
self.env.set_timeout(7890123, db.DB_SET_TXN_TIMEOUT)
|
||||
self.assertEqual(self.env.get_timeout(db.DB_SET_TXN_TIMEOUT), 7890123)
|
||||
|
||||
def test04_lock_timeout2(self):
|
||||
self.env.set_timeout(0, db.DB_SET_LOCK_TIMEOUT)
|
||||
self.env.set_timeout(0, db.DB_SET_TXN_TIMEOUT)
|
||||
self.env.set_timeout(123456, db.DB_SET_LOCK_TIMEOUT)
|
||||
self.env.set_timeout(7890123, db.DB_SET_TXN_TIMEOUT)
|
||||
|
||||
def deadlock_detection() :
|
||||
while not deadlock_detection.end :
|
||||
deadlock_detection.count = \
|
||||
self.env.lock_detect(db.DB_LOCK_EXPIRE)
|
||||
if deadlock_detection.count :
|
||||
while not deadlock_detection.end :
|
||||
pass
|
||||
break
|
||||
time.sleep(0.01)
|
||||
|
||||
deadlock_detection.end=False
|
||||
deadlock_detection.count=0
|
||||
t=Thread(target=deadlock_detection)
|
||||
import sys
|
||||
if sys.version_info[0] < 3 :
|
||||
t.setDaemon(True)
|
||||
else :
|
||||
t.daemon = True
|
||||
t.start()
|
||||
self.env.set_timeout(100000, db.DB_SET_LOCK_TIMEOUT)
|
||||
anID = self.env.lock_id()
|
||||
anID2 = self.env.lock_id()
|
||||
self.assertNotEqual(anID, anID2)
|
||||
lock = self.env.lock_get(anID, "shared lock", db.DB_LOCK_WRITE)
|
||||
start_time=time.time()
|
||||
self.assertRaises(db.DBLockNotGrantedError,
|
||||
self.env.lock_get,anID2, "shared lock", db.DB_LOCK_READ)
|
||||
end_time=time.time()
|
||||
deadlock_detection.end=True
|
||||
# Floating point rounding
|
||||
self.assertTrue((end_time-start_time) >= 0.0999)
|
||||
self.env.lock_put(lock)
|
||||
t.join()
|
||||
|
||||
self.env.lock_id_free(anID)
|
||||
self.env.lock_id_free(anID2)
|
||||
|
||||
if db.version() >= (4,6):
|
||||
self.assertTrue(deadlock_detection.count>0)
|
||||
|
||||
def theThread(self, lockType):
|
||||
import sys
|
||||
if sys.version_info[0] < 3 :
|
||||
name = currentThread().getName()
|
||||
else :
|
||||
name = currentThread().name
|
||||
|
||||
if lockType == db.DB_LOCK_WRITE:
|
||||
lt = "write"
|
||||
else:
|
||||
lt = "read"
|
||||
|
||||
anID = self.env.lock_id()
|
||||
if verbose:
|
||||
print "%s: locker ID: %s" % (name, anID)
|
||||
|
||||
for i in xrange(1000) :
|
||||
lock = self.env.lock_get(anID, "some locked thing", lockType)
|
||||
if verbose:
|
||||
print "%s: Aquired %s lock: %s" % (name, lt, lock)
|
||||
|
||||
self.env.lock_put(lock)
|
||||
if verbose:
|
||||
print "%s: Released %s lock: %s" % (name, lt, lock)
|
||||
|
||||
self.env.lock_id_free(anID)
|
||||
|
||||
|
||||
#----------------------------------------------------------------------
|
||||
|
||||
def test_suite():
|
||||
suite = unittest.TestSuite()
|
||||
|
||||
if have_threads:
|
||||
suite.addTest(unittest.makeSuite(LockingTestCase))
|
||||
else:
|
||||
suite.addTest(unittest.makeSuite(LockingTestCase, 'test01'))
|
||||
|
||||
return suite
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main(defaultTest='test_suite')
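
The locking pattern these tests repeat, in isolation: allocate a locker ID, take a lock on an arbitrary byte string naming the resource, and release both explicitly. A minimal sketch (assuming `env` opened with DB_INIT_LOCK as above):

locker = env.lock_id()
lock = env.lock_get(locker, "some resource", db.DB_LOCK_WRITE)
try:
    pass                        # critical section goes here
finally:
    env.lock_put(lock)
    env.lock_id_free(locker)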

@ -1,138 +0,0 @@
"""Miscellaneous bsddb module test cases
|
||||
"""
|
||||
|
||||
import os, sys
|
||||
import unittest
|
||||
|
||||
from test_all import db, dbshelve, hashopen, test_support, get_new_environment_path, get_new_database_path
|
||||
|
||||
#----------------------------------------------------------------------
|
||||
|
||||
class MiscTestCase(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.filename = get_new_database_path()
|
||||
self.homeDir = get_new_environment_path()
|
||||
|
||||
def tearDown(self):
|
||||
test_support.unlink(self.filename)
|
||||
test_support.rmtree(self.homeDir)
|
||||
|
||||
def test01_badpointer(self):
|
||||
dbs = dbshelve.open(self.filename)
|
||||
dbs.close()
|
||||
self.assertRaises(db.DBError, dbs.get, "foo")
|
||||
|
||||
def test02_db_home(self):
|
||||
env = db.DBEnv()
|
||||
# check for crash fixed when db_home is used before open()
|
||||
self.assertTrue(env.db_home is None)
|
||||
env.open(self.homeDir, db.DB_CREATE)
|
||||
if sys.version_info[0] < 3 :
|
||||
self.assertEqual(self.homeDir, env.db_home)
|
||||
else :
|
||||
self.assertEqual(bytes(self.homeDir, "ascii"), env.db_home)
|
||||
|
||||
def test03_repr_closed_db(self):
|
||||
db = hashopen(self.filename)
|
||||
db.close()
|
||||
rp = repr(db)
|
||||
self.assertEqual(rp, "{}")
|
||||
|
||||
def test04_repr_db(self) :
|
||||
db = hashopen(self.filename)
|
||||
d = {}
|
||||
for i in xrange(100) :
|
||||
db[repr(i)] = repr(100*i)
|
||||
d[repr(i)] = repr(100*i)
|
||||
db.close()
|
||||
db = hashopen(self.filename)
|
||||
rp = repr(db)
|
||||
self.assertEqual(rp, repr(d))
|
||||
db.close()
|
||||
|
||||
# http://sourceforge.net/tracker/index.php?func=detail&aid=1708868&group_id=13900&atid=313900
|
||||
#
|
||||
# See the bug report for details.
|
||||
#
|
||||
# The problem was that make_key_dbt() was not allocating a copy of
|
||||
# string keys but FREE_DBT() was always being told to free it when the
|
||||
# database was opened with DB_THREAD.
|
||||
def test05_double_free_make_key_dbt(self):
|
||||
try:
|
||||
db1 = db.DB()
|
||||
db1.open(self.filename, None, db.DB_BTREE,
|
||||
db.DB_CREATE | db.DB_THREAD)
|
||||
|
||||
curs = db1.cursor()
|
||||
t = curs.get("/foo", db.DB_SET)
|
||||
# double free happened during exit from DBC_get
|
||||
finally:
|
||||
db1.close()
|
||||
test_support.unlink(self.filename)
|
||||
|
||||
def test06_key_with_null_bytes(self):
|
||||
try:
|
||||
db1 = db.DB()
|
||||
db1.open(self.filename, None, db.DB_HASH, db.DB_CREATE)
|
||||
db1['a'] = 'eh?'
|
||||
db1['a\x00'] = 'eh zed.'
|
||||
db1['a\x00a'] = 'eh zed eh?'
|
||||
db1['aaa'] = 'eh eh eh!'
|
||||
keys = db1.keys()
|
||||
keys.sort()
|
||||
self.assertEqual(['a', 'a\x00', 'a\x00a', 'aaa'], keys)
|
||||
self.assertEqual(db1['a'], 'eh?')
|
||||
self.assertEqual(db1['a\x00'], 'eh zed.')
|
||||
self.assertEqual(db1['a\x00a'], 'eh zed eh?')
|
||||
self.assertEqual(db1['aaa'], 'eh eh eh!')
|
||||
finally:
|
||||
db1.close()
|
||||
test_support.unlink(self.filename)
|
||||
|
||||
def test07_DB_set_flags_persists(self):
|
||||
try:
|
||||
db1 = db.DB()
|
||||
db1.set_flags(db.DB_DUPSORT)
|
||||
db1.open(self.filename, db.DB_HASH, db.DB_CREATE)
|
||||
db1['a'] = 'eh'
|
||||
db1['a'] = 'A'
|
||||
self.assertEqual([('a', 'A')], db1.items())
|
||||
db1.put('a', 'Aa')
|
||||
self.assertEqual([('a', 'A'), ('a', 'Aa')], db1.items())
|
||||
db1.close()
|
||||
db1 = db.DB()
|
||||
# no set_flags call, we're testing that it reads and obeys
|
||||
# the flags on open.
|
||||
db1.open(self.filename, db.DB_HASH)
|
||||
self.assertEqual([('a', 'A'), ('a', 'Aa')], db1.items())
|
||||
# if it read the flags right this will replace all values
|
||||
# for key 'a' instead of adding a new one. (as a dict should)
|
||||
db1['a'] = 'new A'
|
||||
self.assertEqual([('a', 'new A')], db1.items())
|
||||
finally:
|
||||
db1.close()
|
||||
test_support.unlink(self.filename)
|
||||
|
||||
|
||||
def test08_ExceptionTypes(self) :
|
||||
self.assertTrue(issubclass(db.DBError, Exception))
|
||||
for i, j in db.__dict__.items() :
|
||||
if i.startswith("DB") and i.endswith("Error") :
|
||||
self.assertTrue(issubclass(j, db.DBError), msg=i)
|
||||
if i not in ("DBKeyEmptyError", "DBNotFoundError") :
|
||||
self.assertFalse(issubclass(j, KeyError), msg=i)
|
||||
|
||||
# This two exceptions have two bases
|
||||
self.assertTrue(issubclass(db.DBKeyEmptyError, KeyError))
|
||||
self.assertTrue(issubclass(db.DBNotFoundError, KeyError))
|
||||
|
||||
|
||||
#----------------------------------------------------------------------
|
||||
|
||||
|
||||
def test_suite():
|
||||
return unittest.makeSuite(MiscTestCase)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main(defaultTest='test_suite')
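
The point of test08 in one line of control flow: DBNotFoundError (and DBKeyEmptyError) inherit from both DBError and KeyError, so dict-style and DB-style error handling both work. A minimal sketch:

try:
    raise db.DBNotFoundError("no such key")
except KeyError:
    pass                                    # caught via the KeyError base
assert issubclass(db.DBNotFoundError, db.DBError)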

@ -1,68 +0,0 @@
|
import os
import pickle
import sys

if sys.version_info[0] < 3 :
    try:
        import cPickle
    except ImportError:
        cPickle = None
else :
    cPickle = None

import unittest

from test_all import db, test_support, get_new_environment_path, get_new_database_path

#----------------------------------------------------------------------

class pickleTestCase(unittest.TestCase):
    """Verify that DBError can be pickled and unpickled"""
    db_name = 'test-dbobj.db'

    def setUp(self):
        self.homeDir = get_new_environment_path()

    def tearDown(self):
        if hasattr(self, 'db'):
            del self.db
        if hasattr(self, 'env'):
            del self.env
        test_support.rmtree(self.homeDir)

    def _base_test_pickle_DBError(self, pickle):
        self.env = db.DBEnv()
        self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL)
        self.db = db.DB(self.env)
        self.db.open(self.db_name, db.DB_HASH, db.DB_CREATE)
        self.db.put('spam', 'eggs')
        self.assertEqual(self.db['spam'], 'eggs')
        try:
            self.db.put('spam', 'ham', flags=db.DB_NOOVERWRITE)
        except db.DBError, egg:
            pickledEgg = pickle.dumps(egg)
            #print repr(pickledEgg)
            rottenEgg = pickle.loads(pickledEgg)
            if rottenEgg.args != egg.args or type(rottenEgg) != type(egg):
                raise Exception, (rottenEgg, '!=', egg)
        else:
            raise Exception, "where's my DBError exception?!?"

        self.db.close()
        self.env.close()

    def test01_pickle_DBError(self):
        self._base_test_pickle_DBError(pickle=pickle)

    if cPickle:
        def test02_cPickle_DBError(self):
            self._base_test_pickle_DBError(pickle=cPickle)

#----------------------------------------------------------------------

def test_suite():
    return unittest.makeSuite(pickleTestCase)

if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
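
What the test verifies, standalone: a DBError round-trips through pickle with its type and args intact. A minimal sketch (using an exception class from this module):

import pickle
err = db.DBNoSuchFileError("no such file")
clone = pickle.loads(pickle.dumps(err))
assert type(clone) is type(err) and clone.args == err.args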

@ -1,163 +0,0 @@
"""
|
||||
TestCases for exercising a Queue DB.
|
||||
"""
|
||||
|
||||
import os, string
|
||||
from pprint import pprint
|
||||
import unittest
|
||||
|
||||
from test_all import db, verbose, get_new_database_path
|
||||
|
||||
#----------------------------------------------------------------------
|
||||
|
||||
class SimpleQueueTestCase(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.filename = get_new_database_path()
|
||||
|
||||
def tearDown(self):
|
||||
try:
|
||||
os.remove(self.filename)
|
||||
except os.error:
|
||||
pass
|
||||
|
||||
|
||||
def test01_basic(self):
|
||||
# Basic Queue tests using the deprecated DBCursor.consume method.
|
||||
|
||||
if verbose:
|
||||
print '\n', '-=' * 30
|
||||
print "Running %s.test01_basic..." % self.__class__.__name__
|
||||
|
||||
d = db.DB()
|
||||
d.set_re_len(40) # Queues must be fixed length
|
||||
d.open(self.filename, db.DB_QUEUE, db.DB_CREATE)
|
||||
|
||||
if verbose:
|
||||
print "before appends" + '-' * 30
|
||||
pprint(d.stat())
|
||||
|
||||
for x in string.letters:
|
||||
d.append(x * 40)
|
||||
|
||||
self.assertEqual(len(d), len(string.letters))
|
||||
|
||||
d.put(100, "some more data")
|
||||
d.put(101, "and some more ")
|
||||
d.put(75, "out of order")
|
||||
d.put(1, "replacement data")
|
||||
|
||||
self.assertEqual(len(d), len(string.letters)+3)
|
||||
|
||||
if verbose:
|
||||
print "before close" + '-' * 30
|
||||
pprint(d.stat())
|
||||
|
||||
d.close()
|
||||
del d
|
||||
d = db.DB()
|
||||
d.open(self.filename)
|
||||
|
||||
if verbose:
|
||||
print "after open" + '-' * 30
|
||||
pprint(d.stat())
|
||||
|
||||
# Test "txn" as a positional parameter
|
||||
d.append("one more", None)
|
||||
# Test "txn" as a keyword parameter
|
||||
d.append("another one", txn=None)
|
||||
|
||||
c = d.cursor()
|
||||
|
||||
if verbose:
|
||||
print "after append" + '-' * 30
|
||||
pprint(d.stat())
|
||||
|
||||
rec = c.consume()
|
||||
while rec:
|
||||
if verbose:
|
||||
print rec
|
||||
rec = c.consume()
|
||||
c.close()
|
||||
|
||||
if verbose:
|
||||
print "after consume loop" + '-' * 30
|
||||
pprint(d.stat())
|
||||
|
||||
self.assertEqual(len(d), 0, \
|
||||
"if you see this message then you need to rebuild " \
|
||||
"Berkeley DB 3.1.17 with the patch in patches/qam_stat.diff")
|
||||
|
||||
d.close()
|
||||
|
||||
|
||||
|
||||
def test02_basicPost32(self):
|
||||
# Basic Queue tests using the new DB.consume method in DB 3.2+
|
||||
# (No cursor needed)
|
||||
|
||||
if verbose:
|
||||
print '\n', '-=' * 30
|
||||
print "Running %s.test02_basicPost32..." % self.__class__.__name__
|
||||
|
||||
d = db.DB()
|
||||
d.set_re_len(40) # Queues must be fixed length
|
||||
d.open(self.filename, db.DB_QUEUE, db.DB_CREATE)
|
||||
|
||||
if verbose:
|
||||
print "before appends" + '-' * 30
|
||||
pprint(d.stat())
|
||||
|
||||
for x in string.letters:
|
||||
d.append(x * 40)
|
||||
|
||||
self.assertEqual(len(d), len(string.letters))
|
||||
|
||||
d.put(100, "some more data")
|
||||
d.put(101, "and some more ")
|
||||
d.put(75, "out of order")
|
||||
d.put(1, "replacement data")
|
||||
|
||||
self.assertEqual(len(d), len(string.letters)+3)
|
||||
|
||||
if verbose:
|
||||
print "before close" + '-' * 30
|
||||
pprint(d.stat())
|
||||
|
||||
d.close()
|
||||
del d
|
||||
d = db.DB()
|
||||
d.open(self.filename)
|
||||
#d.set_get_returns_none(true)
|
||||
|
||||
if verbose:
|
||||
print "after open" + '-' * 30
|
||||
pprint(d.stat())
|
||||
|
||||
d.append("one more")
|
||||
|
||||
if verbose:
|
||||
print "after append" + '-' * 30
|
||||
pprint(d.stat())
|
||||
|
||||
rec = d.consume()
|
||||
while rec:
|
||||
if verbose:
|
||||
print rec
|
||||
rec = d.consume()
|
||||
|
||||
if verbose:
|
||||
print "after consume loop" + '-' * 30
|
||||
pprint(d.stat())
|
||||
|
||||
d.close()
|
||||
|
||||
|
||||
|
||||
#----------------------------------------------------------------------
|
||||
|
||||
def test_suite():
|
||||
return unittest.makeSuite(SimpleQueueTestCase)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main(defaultTest='test_suite')
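
Queue semantics in miniature: records are fixed length (set_re_len is mandatory for DB_QUEUE), append() assigns the next record number, and consume() pops from the head. A minimal sketch (assuming a writable `queue_path` placeholder):

q = db.DB()
q.set_re_len(40)
q.open(queue_path, db.DB_QUEUE, db.DB_CREATE)
q.append("job-1".ljust(40))
q.append("job-2".ljust(40))
recno, data = q.consume()       # (1, "job-1" padded to 40 bytes)
q.close()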

@ -1,320 +0,0 @@
"""TestCases for exercising a Recno DB.
|
||||
"""
|
||||
|
||||
import os, sys
|
||||
import errno
|
||||
from pprint import pprint
|
||||
import unittest
|
||||
|
||||
from test_all import db, test_support, verbose, get_new_environment_path, get_new_database_path
|
||||
|
||||
letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
|
||||
|
||||
|
||||
#----------------------------------------------------------------------
|
||||
|
||||
class SimpleRecnoTestCase(unittest.TestCase):
|
||||
if (sys.version_info < (2, 7)) or ((sys.version_info >= (3, 0)) and
|
||||
(sys.version_info < (3, 2))) :
|
||||
def assertIsInstance(self, obj, datatype, msg=None) :
|
||||
return self.assertEqual(type(obj), datatype, msg=msg)
|
||||
def assertGreaterEqual(self, a, b, msg=None) :
|
||||
return self.assertTrue(a>=b, msg=msg)
|
||||
|
||||
|
||||
def setUp(self):
|
||||
self.filename = get_new_database_path()
|
||||
self.homeDir = None
|
||||
|
||||
def tearDown(self):
|
||||
test_support.unlink(self.filename)
|
||||
if self.homeDir:
|
||||
test_support.rmtree(self.homeDir)
|
||||
|
||||
def test01_basic(self):
|
||||
d = db.DB()
|
||||
|
||||
get_returns_none = d.set_get_returns_none(2)
|
||||
d.set_get_returns_none(get_returns_none)
|
||||
|
||||
d.open(self.filename, db.DB_RECNO, db.DB_CREATE)
|
||||
|
||||
for x in letters:
|
||||
recno = d.append(x * 60)
|
||||
self.assertIsInstance(recno, int)
|
||||
self.assertGreaterEqual(recno, 1)
|
||||
if verbose:
|
||||
print recno,
|
||||
|
||||
if verbose: print
|
||||
|
||||
stat = d.stat()
|
||||
if verbose:
|
||||
pprint(stat)
|
||||
|
||||
for recno in range(1, len(d)+1):
|
||||
data = d[recno]
|
||||
if verbose:
|
||||
print data
|
||||
|
||||
self.assertIsInstance(data, str)
|
||||
self.assertEqual(data, d.get(recno))
|
||||
|
||||
try:
|
||||
data = d[0] # This should raise a KeyError!?!?!
|
||||
except db.DBInvalidArgError, val:
|
||||
if sys.version_info < (2, 6) :
|
||||
self.assertEqual(val[0], db.EINVAL)
|
||||
else :
|
||||
self.assertEqual(val.args[0], db.EINVAL)
|
||||
if verbose: print val
|
||||
else:
|
||||
self.fail("expected exception")
|
||||
|
||||
# test that has_key raises DB exceptions (fixed in pybsddb 4.3.2)
|
||||
try:
|
||||
d.has_key(0)
|
||||
except db.DBError, val:
|
||||
pass
|
||||
else:
|
||||
self.fail("has_key did not raise a proper exception")
|
||||
|
||||
try:
|
||||
data = d[100]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
self.fail("expected exception")
|
||||
|
||||
try:
|
||||
data = d.get(100)
|
||||
except db.DBNotFoundError, val:
|
||||
if get_returns_none:
|
||||
self.fail("unexpected exception")
|
||||
else:
|
||||
self.assertEqual(data, None)
|
||||
|
||||
keys = d.keys()
|
||||
if verbose:
|
||||
print keys
|
||||
self.assertIsInstance(keys, list)
|
||||
self.assertIsInstance(keys[0], int)
|
||||
self.assertEqual(len(keys), len(d))
|
||||
|
||||
items = d.items()
|
||||
if verbose:
|
||||
pprint(items)
|
||||
self.assertIsInstance(items, list)
|
||||
self.assertIsInstance(items[0], tuple)
|
||||
self.assertEqual(len(items[0]), 2)
|
||||
self.assertIsInstance(items[0][0], int)
|
||||
self.assertIsInstance(items[0][1], str)
|
||||
self.assertEqual(len(items), len(d))
|
||||
|
||||
self.assertTrue(d.has_key(25))
|
||||
|
||||
del d[25]
|
||||
self.assertFalse(d.has_key(25))
|
||||
|
||||
d.delete(13)
|
||||
self.assertFalse(d.has_key(13))
|
||||
|
||||
data = d.get_both(26, "z" * 60)
|
||||
self.assertEqual(data, "z" * 60, 'was %r' % data)
|
||||
if verbose:
|
||||
print data
|
||||
|
||||
fd = d.fd()
|
||||
if verbose:
|
||||
print fd
|
||||
|
||||
c = d.cursor()
|
||||
rec = c.first()
|
||||
while rec:
|
||||
if verbose:
|
||||
print rec
|
||||
rec = c.next()
|
||||
|
||||
c.set(50)
|
||||
rec = c.current()
|
||||
if verbose:
|
||||
print rec
|
||||
|
||||
c.put(-1, "a replacement record", db.DB_CURRENT)
|
||||
|
||||
c.set(50)
|
||||
rec = c.current()
|
||||
self.assertEqual(rec, (50, "a replacement record"))
|
||||
if verbose:
|
||||
print rec
|
||||
|
||||
rec = c.set_range(30)
|
||||
if verbose:
|
||||
print rec
|
||||
|
||||
# test that non-existent key lookups work (and that
|
||||
# DBC_set_range doesn't have a memleak under valgrind)
|
||||
rec = c.set_range(999999)
|
||||
self.assertEqual(rec, None)
|
||||
if verbose:
|
||||
print rec
|
||||
|
||||
c.close()
|
||||
d.close()
|
||||
|
||||
d = db.DB()
|
||||
d.open(self.filename)
|
||||
c = d.cursor()
|
||||
|
||||
# put a record beyond the consecutive end of the recno's
|
||||
d[100] = "way out there"
|
||||
self.assertEqual(d[100], "way out there")
|
||||
|
||||
try:
|
||||
data = d[99]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
self.fail("expected exception")
|
||||
|
||||
try:
|
||||
d.get(99)
|
||||
except db.DBKeyEmptyError, val:
|
||||
if get_returns_none:
|
||||
self.fail("unexpected DBKeyEmptyError exception")
|
||||
else:
|
||||
if sys.version_info < (2, 6) :
|
||||
self.assertEqual(val[0], db.DB_KEYEMPTY)
|
||||
else :
|
||||
self.assertEqual(val.args[0], db.DB_KEYEMPTY)
|
||||
if verbose: print val
|
||||
else:
|
||||
if not get_returns_none:
|
||||
self.fail("expected exception")
|
||||
|
||||
rec = c.set(40)
|
||||
while rec:
|
||||
if verbose:
|
||||
print rec
|
||||
rec = c.next()
|
||||
|
||||
c.close()
|
||||
d.close()
|
||||
|
||||
def test02_WithSource(self):
|
||||
"""
|
||||
A Recno file that is given a "backing source file" is essentially a
|
||||
simple ASCII file. Normally each record is delimited by \n and so is
|
||||
just a line in the file, but you can set a different record delimiter
|
||||
if needed.
|
||||
"""
|
||||
homeDir = get_new_environment_path()
|
||||
self.homeDir = homeDir
|
||||
source = os.path.join(homeDir, 'test_recno.txt')
|
||||
if not os.path.isdir(homeDir):
|
||||
os.mkdir(homeDir)
|
||||
f = open(source, 'w') # create the file
|
||||
f.close()
|
||||
|
||||
d = db.DB()
|
||||
# This is the default value, just checking if both int
|
||||
d.set_re_delim(0x0A)
|
||||
d.set_re_delim('\n') # and char can be used...
|
||||
d.set_re_source(source)
|
||||
d.open(self.filename, db.DB_RECNO, db.DB_CREATE)
|
||||
|
||||
data = "The quick brown fox jumped over the lazy dog".split()
|
||||
for datum in data:
|
||||
d.append(datum)
|
||||
d.sync()
|
||||
d.close()
|
||||
|
||||
# get the text from the backing source
|
||||
f = open(source, 'r')
|
||||
text = f.read()
|
||||
f.close()
|
||||
text = text.strip()
|
||||
if verbose:
|
||||
print text
|
||||
print data
|
||||
print text.split('\n')
|
||||
|
||||
self.assertEqual(text.split('\n'), data)
|
||||
|
||||
# open as a DB again
|
||||
d = db.DB()
|
||||
d.set_re_source(source)
|
||||
d.open(self.filename, db.DB_RECNO)
|
||||
|
||||
d[3] = 'reddish-brown'
|
||||
d[8] = 'comatose'
|
||||
|
||||
d.sync()
|
||||
d.close()
|
||||
|
||||
f = open(source, 'r')
|
||||
text = f.read()
|
||||
f.close()
|
||||
text = text.strip()
|
||||
if verbose:
|
||||
print text
|
||||
print text.split('\n')
|
||||
|
||||
self.assertEqual(text.split('\n'),
|
||||
"The quick reddish-brown fox jumped over the comatose dog".split())
|
||||
|
||||
def test03_FixedLength(self):
|
||||
d = db.DB()
|
||||
d.set_re_len(40) # fixed length records, 40 bytes long
|
||||
d.set_re_pad('-') # sets the pad character...
|
||||
d.set_re_pad(45) # ...test both int and char
|
||||
d.open(self.filename, db.DB_RECNO, db.DB_CREATE)
|
||||
|
||||
for x in letters:
|
||||
d.append(x * 35) # These will be padded
|
||||
|
||||
d.append('.' * 40) # this one will be exact
|
||||
|
||||
try: # this one will fail
|
||||
d.append('bad' * 20)
|
||||
except db.DBInvalidArgError, val:
|
||||
if sys.version_info < (2, 6) :
|
||||
self.assertEqual(val[0], db.EINVAL)
|
||||
else :
|
||||
self.assertEqual(val.args[0], db.EINVAL)
|
||||
if verbose: print val
|
||||
else:
|
||||
self.fail("expected exception")
|
||||
|
||||
c = d.cursor()
|
||||
rec = c.first()
|
||||
while rec:
|
||||
if verbose:
|
||||
print rec
|
||||
rec = c.next()
|
||||
|
||||
c.close()
|
||||
d.close()
|
||||
|
||||
def test04_get_size_empty(self) :
|
||||
d = db.DB()
|
||||
d.open(self.filename, dbtype=db.DB_RECNO, flags=db.DB_CREATE)
|
||||
|
||||
row_id = d.append(' ')
|
||||
self.assertEqual(1, d.get_size(key=row_id))
|
||||
row_id = d.append('')
|
||||
self.assertEqual(0, d.get_size(key=row_id))
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
#----------------------------------------------------------------------
|
||||
|
||||
|
||||
def test_suite():
|
||||
return unittest.makeSuite(SimpleRecnoTestCase)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main(defaultTest='test_suite')
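
The backing-source feature from test02_WithSource, reduced: the DB and a plain text file mirror each other, one line per record, after sync(). A minimal sketch (`records.txt` and `recno_path` are placeholders):

src = 'records.txt'
open(src, 'w').close()              # the source file must exist
d = db.DB()
d.set_re_source(src)
d.open(recno_path, db.DB_RECNO, db.DB_CREATE)
d.append('first line')
d.append('second line')
d.sync()                            # flush records back to records.txt
d.close()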

@ -1,543 +0,0 @@
"""TestCases for distributed transactions.
|
||||
"""
|
||||
|
||||
import os
|
||||
import time
|
||||
import unittest
|
||||
|
||||
from test_all import db, test_support, have_threads, verbose, \
|
||||
get_new_environment_path, get_new_database_path
|
||||
|
||||
|
||||
#----------------------------------------------------------------------
|
||||
|
||||
class DBReplication(unittest.TestCase) :
|
||||
def setUp(self) :
|
||||
self.homeDirMaster = get_new_environment_path()
|
||||
self.homeDirClient = get_new_environment_path()
|
||||
|
||||
self.dbenvMaster = db.DBEnv()
|
||||
self.dbenvClient = db.DBEnv()
|
||||
|
||||
# Must use "DB_THREAD" because the Replication Manager will
|
||||
# be executed in other threads but will use the same environment.
|
||||
# http://forums.oracle.com/forums/thread.jspa?threadID=645788&tstart=0
|
||||
self.dbenvMaster.open(self.homeDirMaster, db.DB_CREATE | db.DB_INIT_TXN
|
||||
| db.DB_INIT_LOG | db.DB_INIT_MPOOL | db.DB_INIT_LOCK |
|
||||
db.DB_INIT_REP | db.DB_RECOVER | db.DB_THREAD, 0666)
|
||||
self.dbenvClient.open(self.homeDirClient, db.DB_CREATE | db.DB_INIT_TXN
|
||||
| db.DB_INIT_LOG | db.DB_INIT_MPOOL | db.DB_INIT_LOCK |
|
||||
db.DB_INIT_REP | db.DB_RECOVER | db.DB_THREAD, 0666)
|
||||
|
||||
self.confirmed_master=self.client_startupdone=False
|
||||
def confirmed_master(a,b,c) :
|
||||
if b==db.DB_EVENT_REP_MASTER :
|
||||
self.confirmed_master=True
|
||||
|
||||
def client_startupdone(a,b,c) :
|
||||
if b==db.DB_EVENT_REP_STARTUPDONE :
|
||||
self.client_startupdone=True
|
||||
|
||||
self.dbenvMaster.set_event_notify(confirmed_master)
|
||||
self.dbenvClient.set_event_notify(client_startupdone)
|
||||
|
||||
#self.dbenvMaster.set_verbose(db.DB_VERB_REPLICATION, True)
|
||||
#self.dbenvMaster.set_verbose(db.DB_VERB_FILEOPS_ALL, True)
|
||||
#self.dbenvClient.set_verbose(db.DB_VERB_REPLICATION, True)
|
||||
#self.dbenvClient.set_verbose(db.DB_VERB_FILEOPS_ALL, True)
|
||||
|
||||
self.dbMaster = self.dbClient = None
|
||||
|
||||
|
||||
def tearDown(self):
|
||||
if self.dbClient :
|
||||
self.dbClient.close()
|
||||
if self.dbMaster :
|
||||
self.dbMaster.close()
|
||||
|
||||
# Here we assign dummy event handlers to allow GC of the test object.
|
||||
# Since the dummy handler doesn't use any outer scope variable, it
|
||||
# doesn't keep any reference to the test object.
|
||||
def dummy(*args) :
|
||||
pass
|
||||
self.dbenvMaster.set_event_notify(dummy)
|
||||
self.dbenvClient.set_event_notify(dummy)
|
||||
|
||||
self.dbenvClient.close()
|
||||
self.dbenvMaster.close()
|
||||
test_support.rmtree(self.homeDirClient)
|
||||
test_support.rmtree(self.homeDirMaster)
|
||||
|
||||
class DBReplicationManager(DBReplication) :
|
||||
def test01_basic_replication(self) :
|
||||
master_port = test_support.find_unused_port()
|
||||
client_port = test_support.find_unused_port()
|
||||
if db.version() >= (5, 2) :
|
||||
self.site = self.dbenvMaster.repmgr_site("127.0.0.1", master_port)
|
||||
self.site.set_config(db.DB_GROUP_CREATOR, True)
|
||||
self.site.set_config(db.DB_LOCAL_SITE, True)
|
||||
self.site2 = self.dbenvMaster.repmgr_site("127.0.0.1", client_port)
|
||||
|
||||
self.site3 = self.dbenvClient.repmgr_site("127.0.0.1", master_port)
|
||||
self.site3.set_config(db.DB_BOOTSTRAP_HELPER, True)
|
||||
self.site4 = self.dbenvClient.repmgr_site("127.0.0.1", client_port)
|
||||
self.site4.set_config(db.DB_LOCAL_SITE, True)
|
||||
|
||||
d = {
|
||||
db.DB_BOOTSTRAP_HELPER: [False, False, True, False],
|
||||
db.DB_GROUP_CREATOR: [True, False, False, False],
|
||||
db.DB_LEGACY: [False, False, False, False],
|
||||
db.DB_LOCAL_SITE: [True, False, False, True],
|
||||
db.DB_REPMGR_PEER: [False, False, False, False ],
|
||||
}
|
||||
|
||||
for i, j in d.items() :
|
||||
for k, v in \
|
||||
zip([self.site, self.site2, self.site3, self.site4], j) :
|
||||
if v :
|
||||
self.assertTrue(k.get_config(i))
|
||||
else :
|
||||
self.assertFalse(k.get_config(i))
|
||||
|
||||
self.assertNotEqual(self.site.get_eid(), self.site2.get_eid())
|
||||
self.assertNotEqual(self.site3.get_eid(), self.site4.get_eid())
|
||||
|
||||
for i, j in zip([self.site, self.site2, self.site3, self.site4], \
|
||||
[master_port, client_port, master_port, client_port]) :
|
||||
addr = i.get_address()
|
||||
self.assertEqual(addr, ("127.0.0.1", j))
|
||||
|
||||
for i in [self.site, self.site2] :
|
||||
self.assertEqual(i.get_address(),
|
||||
self.dbenvMaster.repmgr_site_by_eid(i.get_eid()).get_address())
|
||||
for i in [self.site3, self.site4] :
|
||||
self.assertEqual(i.get_address(),
|
||||
self.dbenvClient.repmgr_site_by_eid(i.get_eid()).get_address())
|
||||
else :
|
||||
self.dbenvMaster.repmgr_set_local_site("127.0.0.1", master_port)
|
||||
self.dbenvClient.repmgr_set_local_site("127.0.0.1", client_port)
|
||||
self.dbenvMaster.repmgr_add_remote_site("127.0.0.1", client_port)
|
||||
self.dbenvClient.repmgr_add_remote_site("127.0.0.1", master_port)
|
||||
|
||||
self.dbenvMaster.rep_set_nsites(2)
|
||||
self.dbenvClient.rep_set_nsites(2)
|
||||
|
||||
self.dbenvMaster.rep_set_priority(10)
|
||||
self.dbenvClient.rep_set_priority(0)
|
||||
|
||||
self.dbenvMaster.rep_set_timeout(db.DB_REP_CONNECTION_RETRY,100123)
|
||||
self.dbenvClient.rep_set_timeout(db.DB_REP_CONNECTION_RETRY,100321)
|
||||
self.assertEqual(self.dbenvMaster.rep_get_timeout(
|
||||
db.DB_REP_CONNECTION_RETRY), 100123)
|
||||
self.assertEqual(self.dbenvClient.rep_get_timeout(
|
||||
db.DB_REP_CONNECTION_RETRY), 100321)
|
||||
|
||||
self.dbenvMaster.rep_set_timeout(db.DB_REP_ELECTION_TIMEOUT, 100234)
|
||||
self.dbenvClient.rep_set_timeout(db.DB_REP_ELECTION_TIMEOUT, 100432)
|
||||
self.assertEqual(self.dbenvMaster.rep_get_timeout(
|
||||
db.DB_REP_ELECTION_TIMEOUT), 100234)
|
||||
self.assertEqual(self.dbenvClient.rep_get_timeout(
|
||||
db.DB_REP_ELECTION_TIMEOUT), 100432)
|
||||
|
||||
self.dbenvMaster.rep_set_timeout(db.DB_REP_ELECTION_RETRY, 100345)
|
||||
self.dbenvClient.rep_set_timeout(db.DB_REP_ELECTION_RETRY, 100543)
|
||||
self.assertEqual(self.dbenvMaster.rep_get_timeout(
|
||||
db.DB_REP_ELECTION_RETRY), 100345)
|
||||
self.assertEqual(self.dbenvClient.rep_get_timeout(
|
||||
db.DB_REP_ELECTION_RETRY), 100543)
|
||||
|
||||
self.dbenvMaster.repmgr_set_ack_policy(db.DB_REPMGR_ACKS_ALL)
|
||||
self.dbenvClient.repmgr_set_ack_policy(db.DB_REPMGR_ACKS_ALL)
|
||||
|
||||
self.dbenvMaster.repmgr_start(1, db.DB_REP_MASTER);
|
||||
self.dbenvClient.repmgr_start(1, db.DB_REP_CLIENT);
|
||||
|
||||
self.assertEqual(self.dbenvMaster.rep_get_nsites(),2)
|
||||
self.assertEqual(self.dbenvClient.rep_get_nsites(),2)
|
||||
self.assertEqual(self.dbenvMaster.rep_get_priority(),10)
|
||||
self.assertEqual(self.dbenvClient.rep_get_priority(),0)
|
||||
self.assertEqual(self.dbenvMaster.repmgr_get_ack_policy(),
|
||||
db.DB_REPMGR_ACKS_ALL)
|
||||
self.assertEqual(self.dbenvClient.repmgr_get_ack_policy(),
|
||||
db.DB_REPMGR_ACKS_ALL)
|
||||
|
||||
# The timeout is necessary in BDB 4.5, since DB_EVENT_REP_STARTUPDONE
|
||||
# is not generated if the master has no new transactions.
|
||||
# This is solved in BDB 4.6 (#15542).
|
||||
import time
|
||||
timeout = time.time()+60
|
||||
while (time.time()<timeout) and not (self.confirmed_master and self.client_startupdone) :
|
||||
time.sleep(0.02)
|
||||
# self.client_startupdone does not always get set to True within
|
||||
# the timeout. On windows this may be a deep issue, on other
|
||||
# platforms it is likely just a timing issue, especially on slow
|
||||
# virthost buildbots (see issue 3892 for more). Even though
|
||||
# the timeout triggers, the rest of this test method usually passes
|
||||
# (but not all of it always, see below). So we just note the
|
||||
# timeout on stderr and keep soldering on.
|
||||
if time.time()>timeout:
|
||||
import sys
|
||||
print >> sys.stderr, ("XXX: timeout happened before"
|
||||
"startup was confirmed - see issue 3892")
|
||||
startup_timeout = True
|
||||
|
||||
d = self.dbenvMaster.repmgr_site_list()
|
||||
self.assertEqual(len(d), 1)
|
||||
d = d.values()[0] # There is only one
|
||||
self.assertEqual(d[0], "127.0.0.1")
|
||||
self.assertEqual(d[1], client_port)
|
||||
self.assertTrue((d[2]==db.DB_REPMGR_CONNECTED) or \
|
||||
(d[2]==db.DB_REPMGR_DISCONNECTED))
|
||||
|
||||
d = self.dbenvClient.repmgr_site_list()
|
||||
self.assertEqual(len(d), 1)
|
||||
d = d.values()[0] # There is only one
|
||||
self.assertEqual(d[0], "127.0.0.1")
|
||||
self.assertEqual(d[1], master_port)
|
||||
self.assertTrue((d[2]==db.DB_REPMGR_CONNECTED) or \
|
||||
(d[2]==db.DB_REPMGR_DISCONNECTED))
|
||||
|
||||
if db.version() >= (4,6) :
|
||||
d = self.dbenvMaster.repmgr_stat(flags=db.DB_STAT_CLEAR);
|
||||
self.assertTrue("msgs_queued" in d)
|
||||
|
||||
self.dbMaster=db.DB(self.dbenvMaster)
|
||||
txn=self.dbenvMaster.txn_begin()
|
||||
self.dbMaster.open("test", db.DB_HASH, db.DB_CREATE, 0666, txn=txn)
|
||||
txn.commit()
|
||||
|
||||
import time,os.path
|
||||
timeout=time.time()+10
|
||||
while (time.time()<timeout) and \
|
||||
not (os.path.exists(os.path.join(self.homeDirClient,"test"))) :
|
||||
time.sleep(0.01)
|
||||
|
||||
self.dbClient=db.DB(self.dbenvClient)
|
||||
while True :
|
||||
txn=self.dbenvClient.txn_begin()
|
||||
try :
|
||||
self.dbClient.open("test", db.DB_HASH, flags=db.DB_RDONLY,
|
||||
mode=0666, txn=txn)
|
||||
except db.DBRepHandleDeadError :
|
||||
txn.abort()
|
||||
self.dbClient.close()
|
||||
self.dbClient=db.DB(self.dbenvClient)
|
||||
continue
|
||||
|
||||
txn.commit()
|
||||
break
|
||||
|
||||
txn=self.dbenvMaster.txn_begin()
|
||||
self.dbMaster.put("ABC", "123", txn=txn)
|
||||
txn.commit()
|
||||
import time
|
||||
timeout=time.time()+10
|
||||
v=None
|
||||
while (time.time()<timeout) and (v is None) :
|
||||
txn=self.dbenvClient.txn_begin()
|
||||
v=self.dbClient.get("ABC", txn=txn)
|
||||
txn.commit()
|
||||
if v is None :
|
||||
time.sleep(0.02)
|
||||
# If startup did not happen before the timeout above, then this test
|
||||
# sometimes fails. This happens randomly, which causes buildbot
|
||||
# instability, but all the other bsddb tests pass. Since bsddb3 in the
|
||||
# stdlib is currently not getting active maintenance, and is gone in
|
||||
# py3k, we just skip the end of the test in that case.
|
||||
if time.time()>=timeout and startup_timeout:
|
||||
self.skipTest("replication test skipped due to random failure, "
|
||||
"see issue 3892")
|
||||
self.assertTrue(time.time()<timeout)
|
||||
self.assertEqual("123", v)
|
||||
|
||||
txn=self.dbenvMaster.txn_begin()
|
||||
self.dbMaster.delete("ABC", txn=txn)
|
||||
txn.commit()
|
||||
timeout=time.time()+10
|
||||
while (time.time()<timeout) and (v is not None) :
|
||||
txn=self.dbenvClient.txn_begin()
|
||||
v=self.dbClient.get("ABC", txn=txn)
|
||||
txn.commit()
|
||||
if v is None :
|
||||
time.sleep(0.02)
|
||||
self.assertTrue(time.time()<timeout)
|
||||
self.assertEqual(None, v)
|
||||
|
||||
class DBBaseReplication(DBReplication) :
|
||||
def setUp(self) :
|
||||
DBReplication.setUp(self)
|
||||
def confirmed_master(a,b,c) :
|
||||
if (b == db.DB_EVENT_REP_MASTER) or (b == db.DB_EVENT_REP_ELECTED) :
|
||||
self.confirmed_master = True
|
||||
|
||||
def client_startupdone(a,b,c) :
|
||||
if b == db.DB_EVENT_REP_STARTUPDONE :
|
||||
self.client_startupdone = True
|
||||
|
||||
self.dbenvMaster.set_event_notify(confirmed_master)
|
||||
self.dbenvClient.set_event_notify(client_startupdone)
|
||||
|
||||
import Queue
|
||||
self.m2c = Queue.Queue()
|
||||
self.c2m = Queue.Queue()
|
||||
|
||||
# There are only two nodes, so we don't need to
|
||||
# do any routing decision
|
||||
def m2c(dbenv, control, rec, lsnp, envid, flags) :
|
||||
self.m2c.put((control, rec))
|
||||
|
||||
def c2m(dbenv, control, rec, lsnp, envid, flags) :
|
||||
self.c2m.put((control, rec))
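
        # With the base replication API the application owns the message
        # channel: rep_set_transport() registers the local envid (13 for
        # the master, 3 for the client here) together with a "send"
        # callback.  The callbacks above just queue the raw (control, rec)
        # blobs; the pump threads created below feed them to the peer
        # environment via rep_process_message().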

        self.dbenvMaster.rep_set_transport(13,m2c)
        self.dbenvMaster.rep_set_priority(10)
        self.dbenvClient.rep_set_transport(3,c2m)
        self.dbenvClient.rep_set_priority(0)

        self.assertEqual(self.dbenvMaster.rep_get_priority(),10)
        self.assertEqual(self.dbenvClient.rep_get_priority(),0)

        #self.dbenvMaster.set_verbose(db.DB_VERB_REPLICATION, True)
        #self.dbenvMaster.set_verbose(db.DB_VERB_FILEOPS_ALL, True)
        #self.dbenvClient.set_verbose(db.DB_VERB_REPLICATION, True)
        #self.dbenvClient.set_verbose(db.DB_VERB_FILEOPS_ALL, True)

        def thread_master() :
            return self.thread_do(self.dbenvMaster, self.c2m, 3,
                    self.master_doing_election, True)

        def thread_client() :
            return self.thread_do(self.dbenvClient, self.m2c, 13,
                    self.client_doing_election, False)

        from threading import Thread
        t_m=Thread(target=thread_master)
        t_c=Thread(target=thread_client)
        import sys
        if sys.version_info[0] < 3 :
            t_m.setDaemon(True)
            t_c.setDaemon(True)
        else :
            t_m.daemon = True
            t_c.daemon = True

        self.t_m = t_m
        self.t_c = t_c

        self.dbMaster = self.dbClient = None

        self.master_doing_election=[False]
        self.client_doing_election=[False]


    def tearDown(self):
        if self.dbClient :
            self.dbClient.close()
        if self.dbMaster :
            self.dbMaster.close()
        self.m2c.put(None)
        self.c2m.put(None)
        self.t_m.join()
        self.t_c.join()

        # Here we assign dummy event handlers to allow GC of the test object.
        # Since the dummy handler doesn't use any outer scope variable, it
        # doesn't keep any reference to the test object.
        def dummy(*args) :
            pass
        self.dbenvMaster.set_event_notify(dummy)
        self.dbenvClient.set_event_notify(dummy)
        self.dbenvMaster.rep_set_transport(13,dummy)
        self.dbenvClient.rep_set_transport(3,dummy)

        self.dbenvClient.close()
        self.dbenvMaster.close()
        test_support.rmtree(self.homeDirClient)
        test_support.rmtree(self.homeDirMaster)

    def basic_rep_threading(self) :
        self.dbenvMaster.rep_start(flags=db.DB_REP_MASTER)
        self.dbenvClient.rep_start(flags=db.DB_REP_CLIENT)

        def thread_do(env, q, envid, election_status, must_be_master) :
            while True :
                v=q.get()
                if v is None : return
                env.rep_process_message(v[0], v[1], envid)

        self.thread_do = thread_do

        self.t_m.start()
        self.t_c.start()

    def test01_basic_replication(self) :
        self.basic_rep_threading()

        # The timeout is necessary in BDB 4.5, since DB_EVENT_REP_STARTUPDONE
        # is not generated if the master has no new transactions.
        # This is solved in BDB 4.6 (#15542).
        import time
        timeout = time.time()+60
        while (time.time()<timeout) and not (self.confirmed_master and
                self.client_startupdone) :
            time.sleep(0.02)
        self.assertTrue(time.time()<timeout)

        self.dbMaster=db.DB(self.dbenvMaster)
        txn=self.dbenvMaster.txn_begin()
        self.dbMaster.open("test", db.DB_HASH, db.DB_CREATE, 0666, txn=txn)
        txn.commit()

        import time,os.path
        timeout=time.time()+10
        while (time.time()<timeout) and \
                not (os.path.exists(os.path.join(self.homeDirClient,"test"))) :
            time.sleep(0.01)

        self.dbClient=db.DB(self.dbenvClient)
        while True :
            txn=self.dbenvClient.txn_begin()
            try :
                self.dbClient.open("test", db.DB_HASH, flags=db.DB_RDONLY,
                        mode=0666, txn=txn)
            except db.DBRepHandleDeadError :
                txn.abort()
                self.dbClient.close()
                self.dbClient=db.DB(self.dbenvClient)
                continue

            txn.commit()
            break

        d = self.dbenvMaster.rep_stat(flags=db.DB_STAT_CLEAR)
        self.assertTrue("master_changes" in d)

        txn=self.dbenvMaster.txn_begin()
        self.dbMaster.put("ABC", "123", txn=txn)
        txn.commit()
        import time
        timeout=time.time()+10
        v=None
        while (time.time()<timeout) and (v is None) :
            txn=self.dbenvClient.txn_begin()
            v=self.dbClient.get("ABC", txn=txn)
            txn.commit()
            if v is None :
                time.sleep(0.02)
        self.assertTrue(time.time()<timeout)
        self.assertEqual("123", v)

        txn=self.dbenvMaster.txn_begin()
        self.dbMaster.delete("ABC", txn=txn)
        txn.commit()
        timeout=time.time()+10
        while (time.time()<timeout) and (v is not None) :
            txn=self.dbenvClient.txn_begin()
            v=self.dbClient.get("ABC", txn=txn)
            txn.commit()
            if v is None :
                time.sleep(0.02)
        self.assertTrue(time.time()<timeout)
        self.assertEqual(None, v)

    if db.version() >= (4,7) :
        def test02_test_request(self) :
            self.basic_rep_threading()
            (minimum, maximum) = self.dbenvClient.rep_get_request()
            self.dbenvClient.rep_set_request(minimum-1, maximum+1)
            self.assertEqual(self.dbenvClient.rep_get_request(),
                    (minimum-1, maximum+1))

    if db.version() >= (4,6) :
        def test03_master_election(self) :
            # Get ready to hold an election
            #self.dbenvMaster.rep_start(flags=db.DB_REP_MASTER)
            self.dbenvMaster.rep_start(flags=db.DB_REP_CLIENT)
            self.dbenvClient.rep_start(flags=db.DB_REP_CLIENT)

            def thread_do(env, q, envid, election_status, must_be_master) :
                while True :
                    v=q.get()
                    if v is None : return
                    r = env.rep_process_message(v[0],v[1],envid)
                    if must_be_master and self.confirmed_master :
                        self.dbenvMaster.rep_start(flags = db.DB_REP_MASTER)
                        must_be_master = False

                    if r[0] == db.DB_REP_HOLDELECTION :
                        def elect() :
                            while True :
                                try :
                                    env.rep_elect(2, 1)
                                    election_status[0] = False
                                    break
                                except db.DBRepUnavailError :
                                    pass
                        if not election_status[0] and not self.confirmed_master :
                            from threading import Thread
                            election_status[0] = True
                            t=Thread(target=elect)
                            import sys
                            if sys.version_info[0] < 3 :
                                t.setDaemon(True)
                            else :
                                t.daemon = True
                            t.start()
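
            # A DB_REP_HOLDELECTION result from rep_process_message() means
            # the peer asked this site to run an election.  rep_elect(2, 1)
            # (two sites, one vote needed) raises DBRepUnavailError while the
            # election cannot be completed yet, so elect() retries it in a
            # daemon thread instead of blocking the message pump.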

            self.thread_do = thread_do

            self.t_m.start()
            self.t_c.start()

            self.dbenvMaster.rep_set_timeout(db.DB_REP_ELECTION_TIMEOUT, 50000)
            self.dbenvClient.rep_set_timeout(db.DB_REP_ELECTION_TIMEOUT, 50000)
            self.client_doing_election[0] = True
            while True :
                try :
                    self.dbenvClient.rep_elect(2, 1)
                    self.client_doing_election[0] = False
                    break
                except db.DBRepUnavailError :
                    pass

            self.assertTrue(self.confirmed_master)

            # Race condition showed up after upgrading to Solaris 10 Update 10
            # https://forums.oracle.com/forums/thread.jspa?messageID=9902860
            # jcea@jcea.es: See private email from Paula Bingham (Oracle),
            # in 20110929.
            while not (self.dbenvClient.rep_stat()["startup_complete"]) :
                pass

    if db.version() >= (4,7) :
        def test04_test_clockskew(self) :
            fast, slow = 1234, 1230
            self.dbenvMaster.rep_set_clockskew(fast, slow)
            self.assertEqual((fast, slow),
                    self.dbenvMaster.rep_get_clockskew())
            self.basic_rep_threading()

#----------------------------------------------------------------------

def test_suite():
    suite = unittest.TestSuite()
    if db.version() >= (4, 6) :
        dbenv = db.DBEnv()
        try :
            dbenv.repmgr_get_ack_policy()
            ReplicationManager_available=True
        except :
            ReplicationManager_available=False
        dbenv.close()
        del dbenv
        if ReplicationManager_available :
            suite.addTest(unittest.makeSuite(DBReplicationManager))

        if have_threads :
            suite.addTest(unittest.makeSuite(DBBaseReplication))

    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
@@ -1,136 +0,0 @@
import unittest
import os

from test_all import db, test_support, get_new_environment_path, get_new_database_path


class DBSequenceTest(unittest.TestCase):
    def setUp(self):
        self.int_32_max = 0x100000000
        self.homeDir = get_new_environment_path()
        self.filename = "test"

        self.dbenv = db.DBEnv()
        self.dbenv.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL, 0666)
        self.d = db.DB(self.dbenv)
        self.d.open(self.filename, db.DB_BTREE, db.DB_CREATE, 0666)

    def tearDown(self):
        if hasattr(self, 'seq'):
            self.seq.close()
            del self.seq
        if hasattr(self, 'd'):
            self.d.close()
            del self.d
        if hasattr(self, 'dbenv'):
            self.dbenv.close()
            del self.dbenv

        test_support.rmtree(self.homeDir)

    def test_get(self):
        self.seq = db.DBSequence(self.d, flags=0)
        start_value = 10 * self.int_32_max
        self.assertEqual(0xA00000000, start_value)
        self.assertEqual(None, self.seq.initial_value(start_value))
        self.assertEqual(None, self.seq.open(key='id', txn=None, flags=db.DB_CREATE))
        self.assertEqual(start_value, self.seq.get(5))
        self.assertEqual(start_value + 5, self.seq.get())
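
    # DBSequence.get(delta) returns the current value of the sequence and
    # then advances it by delta (default 1): above, the first call returns
    # start_value and moves the counter to start_value + 5, which is exactly
    # what the parameterless second call reports.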

    def test_remove(self):
        self.seq = db.DBSequence(self.d, flags=0)
        self.assertEqual(None, self.seq.open(key='foo', txn=None, flags=db.DB_CREATE))
        self.assertEqual(None, self.seq.remove(txn=None, flags=0))
        del self.seq

    def test_get_key(self):
        self.seq = db.DBSequence(self.d, flags=0)
        key = 'foo'
        self.assertEqual(None, self.seq.open(key=key, txn=None, flags=db.DB_CREATE))
        self.assertEqual(key, self.seq.get_key())

    def test_get_dbp(self):
        self.seq = db.DBSequence(self.d, flags=0)
        self.assertEqual(None, self.seq.open(key='foo', txn=None, flags=db.DB_CREATE))
        self.assertEqual(self.d, self.seq.get_dbp())

    def test_cachesize(self):
        self.seq = db.DBSequence(self.d, flags=0)
        cache_size = 10
        self.assertEqual(None, self.seq.set_cachesize(cache_size))
        self.assertEqual(None, self.seq.open(key='foo', txn=None, flags=db.DB_CREATE))
        self.assertEqual(cache_size, self.seq.get_cachesize())

    def test_flags(self):
        self.seq = db.DBSequence(self.d, flags=0)
        flag = db.DB_SEQ_WRAP
        self.assertEqual(None, self.seq.set_flags(flag))
        self.assertEqual(None, self.seq.open(key='foo', txn=None, flags=db.DB_CREATE))
        self.assertEqual(flag, self.seq.get_flags() & flag)

    def test_range(self):
        self.seq = db.DBSequence(self.d, flags=0)
        seq_range = (10 * self.int_32_max, 11 * self.int_32_max - 1)
        self.assertEqual(None, self.seq.set_range(seq_range))
        self.seq.initial_value(seq_range[0])
        self.assertEqual(None, self.seq.open(key='foo', txn=None, flags=db.DB_CREATE))
        self.assertEqual(seq_range, self.seq.get_range())

    def test_stat(self):
        self.seq = db.DBSequence(self.d, flags=0)
        self.assertEqual(None, self.seq.open(key='foo', txn=None, flags=db.DB_CREATE))
        stat = self.seq.stat()
        for param in ('nowait', 'min', 'max', 'value', 'current',
                      'flags', 'cache_size', 'last_value', 'wait'):
            self.assertTrue(param in stat, "parameter %s isn't in stat info" % param)

    if db.version() >= (4,7) :
        # This code checks a crash solved in Berkeley DB 4.7
        def test_stat_crash(self) :
            d=db.DB()
            d.open(None,dbtype=db.DB_HASH,flags=db.DB_CREATE)  # In RAM
            seq = db.DBSequence(d, flags=0)

            self.assertRaises(db.DBNotFoundError, seq.open,
                    key='id', txn=None, flags=0)

            self.assertRaises(db.DBInvalidArgError, seq.stat)

            d.close()

    def test_64bits(self) :
        # We don't use both extremes because they are problematic
        value_plus=(1L<<63)-2
        self.assertEqual(9223372036854775806L,value_plus)
        value_minus=(-1L<<63)+1  # Two's complement
        self.assertEqual(-9223372036854775807L,value_minus)
        self.seq = db.DBSequence(self.d, flags=0)
        self.assertEqual(None, self.seq.initial_value(value_plus-1))
        self.assertEqual(None, self.seq.open(key='id', txn=None,
                flags=db.DB_CREATE))
        self.assertEqual(value_plus-1, self.seq.get(1))
        self.assertEqual(value_plus, self.seq.get(1))

        self.seq.remove(txn=None, flags=0)

        self.seq = db.DBSequence(self.d, flags=0)
        self.assertEqual(None, self.seq.initial_value(value_minus))
        self.assertEqual(None, self.seq.open(key='id', txn=None,
                flags=db.DB_CREATE))
        self.assertEqual(value_minus, self.seq.get(1))
        self.assertEqual(value_minus+1, self.seq.get(1))

    def test_multiple_close(self):
        self.seq = db.DBSequence(self.d)
        self.seq.close()  # You can close a Sequence multiple times
        self.seq.close()
        self.seq.close()

def test_suite():
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(DBSequenceTest))
    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
@@ -1,517 +0,0 @@
"""TestCases for multi-threaded access to a DB.
"""

import os
import sys
import time
import errno
from random import random

DASH = '-'

try:
    WindowsError
except NameError:
    class WindowsError(Exception):
        pass

import unittest
from test_all import db, dbutils, test_support, verbose, have_threads, \
        get_new_environment_path, get_new_database_path

if have_threads :
    from threading import Thread
    if sys.version_info[0] < 3 :
        from threading import currentThread
    else :
        from threading import current_thread as currentThread


#----------------------------------------------------------------------

class BaseThreadedTestCase(unittest.TestCase):
    dbtype       = db.DB_UNKNOWN  # must be set in derived class
    dbopenflags  = 0
    dbsetflags   = 0
    envflags     = 0

    def setUp(self):
        if verbose:
            dbutils._deadlock_VerboseFile = sys.stdout

        self.homeDir = get_new_environment_path()
        self.env = db.DBEnv()
        self.setEnvOpts()
        self.env.open(self.homeDir, self.envflags | db.DB_CREATE)

        self.filename = self.__class__.__name__ + '.db'
        self.d = db.DB(self.env)
        if self.dbsetflags:
            self.d.set_flags(self.dbsetflags)
        self.d.open(self.filename, self.dbtype, self.dbopenflags|db.DB_CREATE)

    def tearDown(self):
        self.d.close()
        self.env.close()
        test_support.rmtree(self.homeDir)

    def setEnvOpts(self):
        pass

    def makeData(self, key):
        return DASH.join([key] * 5)
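
    # makeData('0001') -> '0001-0001-0001-0001-0001'.  Readers re-derive the
    # expected payload from the key, so a torn or misplaced record shows up
    # as an assertion failure instead of silent corruption.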


#----------------------------------------------------------------------


class ConcurrentDataStoreBase(BaseThreadedTestCase):
    dbopenflags = db.DB_THREAD
    envflags    = db.DB_THREAD | db.DB_INIT_CDB | db.DB_INIT_MPOOL
    readers     = 0  # derived class should set
    writers     = 0
    records     = 1000

    def test01_1WriterMultiReaders(self):
        if verbose:
            print '\n', '-=' * 30
            print "Running %s.test01_1WriterMultiReaders..." % \
                  self.__class__.__name__

        keys=range(self.records)
        import random
        random.shuffle(keys)
        records_per_writer=self.records//self.writers
        readers_per_writer=self.readers//self.writers
        self.assertEqual(self.records,self.writers*records_per_writer)
        self.assertEqual(self.readers,self.writers*readers_per_writer)
        self.assertTrue((records_per_writer%readers_per_writer)==0)
        readers = []

        for x in xrange(self.readers):
            rt = Thread(target = self.readerThread,
                        args = (self.d, x),
                        name = 'reader %d' % x,
                        )#verbose = verbose)
            if sys.version_info[0] < 3 :
                rt.setDaemon(True)
            else :
                rt.daemon = True
            readers.append(rt)

        writers=[]
        for x in xrange(self.writers):
            a=keys[records_per_writer*x:records_per_writer*(x+1)]
            a.sort()  # Generate conflicts
            b=readers[readers_per_writer*x:readers_per_writer*(x+1)]
            wt = Thread(target = self.writerThread,
                        args = (self.d, a, b),
                        name = 'writer %d' % x,
                        )#verbose = verbose)
            writers.append(wt)

        for t in writers:
            if sys.version_info[0] < 3 :
                t.setDaemon(True)
            else :
                t.daemon = True
            t.start()

        for t in writers:
            t.join()
        for t in readers:
            t.join()

    def writerThread(self, d, keys, readers):
        if sys.version_info[0] < 3 :
            name = currentThread().getName()
        else :
            name = currentThread().name

        if verbose:
            print "%s: creating records %d - %d" % (name, keys[0], keys[-1])

        count=len(keys)//len(readers)
        count2=count
        for x in keys :
            key = '%04d' % x
            dbutils.DeadlockWrap(d.put, key, self.makeData(key),
                                 max_retries=12)
            if verbose and x % 100 == 0:
                print "%s: records %d - %d finished" % (name, keys[0], x)

            count2-=1
            if not count2 :
                readers.pop().start()
                count2=count

        if verbose:
            print "%s: finished creating records" % name

        if verbose:
            print "%s: thread finished" % name

    def readerThread(self, d, readerNum):
        if sys.version_info[0] < 3 :
            name = currentThread().getName()
        else :
            name = currentThread().name

        for i in xrange(5) :
            c = d.cursor()
            count = 0
            rec = c.first()
            while rec:
                count += 1
                key, data = rec
                self.assertEqual(self.makeData(key), data)
                rec = c.next()
            if verbose:
                print "%s: found %d records" % (name, count)
            c.close()

        if verbose:
            print "%s: thread finished" % name


class BTreeConcurrentDataStore(ConcurrentDataStoreBase):
    dbtype  = db.DB_BTREE
    writers = 2
    readers = 10
    records = 1000


class HashConcurrentDataStore(ConcurrentDataStoreBase):
    dbtype  = db.DB_HASH
    writers = 2
    readers = 10
    records = 1000


#----------------------------------------------------------------------

class SimpleThreadedBase(BaseThreadedTestCase):
    dbopenflags = db.DB_THREAD
    envflags    = db.DB_THREAD | db.DB_INIT_MPOOL | db.DB_INIT_LOCK
    readers = 10
    writers = 2
    records = 1000

    def setEnvOpts(self):
        self.env.set_lk_detect(db.DB_LOCK_DEFAULT)

    def test02_SimpleLocks(self):
        if verbose:
            print '\n', '-=' * 30
            print "Running %s.test02_SimpleLocks..." % self.__class__.__name__


        keys=range(self.records)
        import random
        random.shuffle(keys)
        records_per_writer=self.records//self.writers
        readers_per_writer=self.readers//self.writers
        self.assertEqual(self.records,self.writers*records_per_writer)
        self.assertEqual(self.readers,self.writers*readers_per_writer)
        self.assertTrue((records_per_writer%readers_per_writer)==0)

        readers = []
        for x in xrange(self.readers):
            rt = Thread(target = self.readerThread,
                        args = (self.d, x),
                        name = 'reader %d' % x,
                        )#verbose = verbose)
            if sys.version_info[0] < 3 :
                rt.setDaemon(True)
            else :
                rt.daemon = True
            readers.append(rt)

        writers = []
        for x in xrange(self.writers):
            a=keys[records_per_writer*x:records_per_writer*(x+1)]
            a.sort()  # Generate conflicts
            b=readers[readers_per_writer*x:readers_per_writer*(x+1)]
            wt = Thread(target = self.writerThread,
                        args = (self.d, a, b),
                        name = 'writer %d' % x,
                        )#verbose = verbose)
            writers.append(wt)

        for t in writers:
            if sys.version_info[0] < 3 :
                t.setDaemon(True)
            else :
                t.daemon = True
            t.start()

        for t in writers:
            t.join()
        for t in readers:
            t.join()

    def writerThread(self, d, keys, readers):
        if sys.version_info[0] < 3 :
            name = currentThread().getName()
        else :
            name = currentThread().name
        if verbose:
            print "%s: creating records %d - %d" % (name, keys[0], keys[-1])

        count=len(keys)//len(readers)
        count2=count
        for x in keys :
            key = '%04d' % x
            dbutils.DeadlockWrap(d.put, key, self.makeData(key),
                                 max_retries=12)

            if verbose and x % 100 == 0:
                print "%s: records %d - %d finished" % (name, keys[0], x)

            count2-=1
            if not count2 :
                readers.pop().start()
                count2=count

        if verbose:
            print "%s: thread finished" % name

    def readerThread(self, d, readerNum):
        if sys.version_info[0] < 3 :
            name = currentThread().getName()
        else :
            name = currentThread().name

        c = d.cursor()
        count = 0
        rec = dbutils.DeadlockWrap(c.first, max_retries=10)
        while rec:
            count += 1
            key, data = rec
            self.assertEqual(self.makeData(key), data)
            rec = dbutils.DeadlockWrap(c.next, max_retries=10)
        if verbose:
            print "%s: found %d records" % (name, count)
        c.close()

        if verbose:
            print "%s: thread finished" % name


class BTreeSimpleThreaded(SimpleThreadedBase):
    dbtype = db.DB_BTREE


class HashSimpleThreaded(SimpleThreadedBase):
    dbtype = db.DB_HASH


#----------------------------------------------------------------------


class ThreadedTransactionsBase(BaseThreadedTestCase):
    dbopenflags = db.DB_THREAD | db.DB_AUTO_COMMIT
    envflags    = (db.DB_THREAD |
                   db.DB_INIT_MPOOL |
                   db.DB_INIT_LOCK |
                   db.DB_INIT_LOG |
                   db.DB_INIT_TXN
                   )
    readers = 0
    writers = 0
    records = 2000
    txnFlag = 0

    def setEnvOpts(self):
        #self.env.set_lk_detect(db.DB_LOCK_DEFAULT)
        pass

    def test03_ThreadedTransactions(self):
        if verbose:
            print '\n', '-=' * 30
            print "Running %s.test03_ThreadedTransactions..." % \
                  self.__class__.__name__

        keys=range(self.records)
        import random
        random.shuffle(keys)
        records_per_writer=self.records//self.writers
        readers_per_writer=self.readers//self.writers
        self.assertEqual(self.records,self.writers*records_per_writer)
        self.assertEqual(self.readers,self.writers*readers_per_writer)
        self.assertTrue((records_per_writer%readers_per_writer)==0)

        readers=[]
        for x in xrange(self.readers):
            rt = Thread(target = self.readerThread,
                        args = (self.d, x),
                        name = 'reader %d' % x,
                        )#verbose = verbose)
            if sys.version_info[0] < 3 :
                rt.setDaemon(True)
            else :
                rt.daemon = True
            readers.append(rt)

        writers = []
        for x in xrange(self.writers):
            a=keys[records_per_writer*x:records_per_writer*(x+1)]
            b=readers[readers_per_writer*x:readers_per_writer*(x+1)]
            wt = Thread(target = self.writerThread,
                        args = (self.d, a, b),
                        name = 'writer %d' % x,
                        )#verbose = verbose)
            writers.append(wt)

        dt = Thread(target = self.deadlockThread)
        if sys.version_info[0] < 3 :
            dt.setDaemon(True)
        else :
            dt.daemon = True
        dt.start()

        for t in writers:
            if sys.version_info[0] < 3 :
                t.setDaemon(True)
            else :
                t.daemon = True
            t.start()

        for t in writers:
            t.join()
        for t in readers:
            t.join()

        self.doLockDetect = False
        dt.join()

    def writerThread(self, d, keys, readers):
        if sys.version_info[0] < 3 :
            name = currentThread().getName()
        else :
            name = currentThread().name

        count=len(keys)//len(readers)
        while len(keys):
            try:
                txn = self.env.txn_begin(None, self.txnFlag)
                keys2=keys[:count]
                for x in keys2 :
                    key = '%04d' % x
                    d.put(key, self.makeData(key), txn)
                    if verbose and x % 100 == 0:
                        print "%s: records %d - %d finished" % (name, keys2[0], x)
                txn.commit()
                keys=keys[count:]
                readers.pop().start()
            except (db.DBLockDeadlockError, db.DBLockNotGrantedError), val:
                if verbose:
                    if sys.version_info < (2, 6) :
                        print "%s: Aborting transaction (%s)" % (name, val[1])
                    else :
                        print "%s: Aborting transaction (%s)" % (name,
                                val.args[1])
                txn.abort()

        if verbose:
            print "%s: thread finished" % name

    def readerThread(self, d, readerNum):
        if sys.version_info[0] < 3 :
            name = currentThread().getName()
        else :
            name = currentThread().name

        finished = False
        while not finished:
            try:
                txn = self.env.txn_begin(None, self.txnFlag)
                c = d.cursor(txn)
                count = 0
                rec = c.first()
                while rec:
                    count += 1
                    key, data = rec
                    self.assertEqual(self.makeData(key), data)
                    rec = c.next()
                if verbose: print "%s: found %d records" % (name, count)
                c.close()
                txn.commit()
                finished = True
            except (db.DBLockDeadlockError, db.DBLockNotGrantedError), val:
                if verbose:
                    if sys.version_info < (2, 6) :
                        print "%s: Aborting transaction (%s)" % (name, val[1])
                    else :
                        print "%s: Aborting transaction (%s)" % (name,
                                val.args[1])
                c.close()
                txn.abort()

        if verbose:
            print "%s: thread finished" % name

    def deadlockThread(self):
        self.doLockDetect = True
        while self.doLockDetect:
            time.sleep(0.05)
            try:
                aborted = self.env.lock_detect(
                    db.DB_LOCK_RANDOM, db.DB_LOCK_CONFLICT)
                if verbose and aborted:
                    print "deadlock: Aborted %d deadlocked transaction(s)" \
                          % aborted
            except db.DBError:
                pass
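
    # lock_detect() periodically aborts transactions stuck in a deadlock
    # cycle (victims chosen at random here) and returns how many it killed.
    # The victims surface in the worker threads as DBLockDeadlockError /
    # DBLockNotGrantedError, which writerThread() and readerThread() catch
    # and retry.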


class BTreeThreadedTransactions(ThreadedTransactionsBase):
    dbtype = db.DB_BTREE
    writers = 2
    readers = 10
    records = 1000

class HashThreadedTransactions(ThreadedTransactionsBase):
    dbtype = db.DB_HASH
    writers = 2
    readers = 10
    records = 1000

class BTreeThreadedNoWaitTransactions(ThreadedTransactionsBase):
    dbtype = db.DB_BTREE
    writers = 2
    readers = 10
    records = 1000
    txnFlag = db.DB_TXN_NOWAIT

class HashThreadedNoWaitTransactions(ThreadedTransactionsBase):
    dbtype = db.DB_HASH
    writers = 2
    readers = 10
    records = 1000
    txnFlag = db.DB_TXN_NOWAIT


#----------------------------------------------------------------------

def test_suite():
    suite = unittest.TestSuite()

    if have_threads:
        suite.addTest(unittest.makeSuite(BTreeConcurrentDataStore))
        suite.addTest(unittest.makeSuite(HashConcurrentDataStore))
        suite.addTest(unittest.makeSuite(BTreeSimpleThreaded))
        suite.addTest(unittest.makeSuite(HashSimpleThreaded))
        suite.addTest(unittest.makeSuite(BTreeThreadedTransactions))
        suite.addTest(unittest.makeSuite(HashThreadedTransactions))
        suite.addTest(unittest.makeSuite(BTreeThreadedNoWaitTransactions))
        suite.addTest(unittest.makeSuite(HashThreadedNoWaitTransactions))

    else:
        print "Threads not available, skipping thread tests."

    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
@@ -1,699 +0,0 @@
__all__ = ['Counter', 'deque', 'defaultdict', 'namedtuple', 'OrderedDict']
# For bootstrapping reasons, the collection ABCs are defined in _abcoll.py.
# They should however be considered an integral part of collections.py.
from _abcoll import *
import _abcoll
__all__ += _abcoll.__all__

from _collections import deque, defaultdict
from operator import itemgetter as _itemgetter, eq as _eq
from keyword import iskeyword as _iskeyword
import sys as _sys
import heapq as _heapq
from itertools import repeat as _repeat, chain as _chain, starmap as _starmap
from itertools import imap as _imap

try:
    from thread import get_ident as _get_ident
except ImportError:
    from dummy_thread import get_ident as _get_ident


################################################################################
### OrderedDict
################################################################################

class OrderedDict(dict):
    'Dictionary that remembers insertion order'
    # An inherited dict maps keys to values.
    # The inherited dict provides __getitem__, __len__, __contains__, and get.
    # The remaining methods are order-aware.
    # Big-O running times for all methods are the same as regular dictionaries.

    # The internal self.__map dict maps keys to links in a doubly linked list.
    # The circular doubly linked list starts and ends with a sentinel element.
    # The sentinel element never gets deleted (this simplifies the algorithm).
    # Each link is stored as a list of length three:  [PREV, NEXT, KEY].

    def __init__(self, *args, **kwds):
        '''Initialize an ordered dictionary.  The signature is the same as
        regular dictionaries, but keyword arguments are not recommended because
        their insertion order is arbitrary.

        '''
        if len(args) > 1:
            raise TypeError('expected at most 1 argument, got %d' % len(args))
        try:
            self.__root
        except AttributeError:
            self.__root = root = []  # sentinel node
            root[:] = [root, root, None]
            self.__map = {}
        self.__update(*args, **kwds)

    def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
        'od.__setitem__(i, y) <==> od[i]=y'
        # Setting a new item creates a new link at the end of the linked list,
        # and the inherited dictionary is updated with the new key/value pair.
        if key not in self:
            root = self.__root
            last = root[0]
            last[1] = root[0] = self.__map[key] = [last, root, key]
        return dict_setitem(self, key, value)
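
    # Illustration: after od['a'] = 1 and od['b'] = 2 the circular list is
    #   root <-> link('a') <-> link('b') <-> root
    # so __iter__() below yields 'a' then 'b' just by following the NEXT
    # pointers.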

    def __delitem__(self, key, dict_delitem=dict.__delitem__):
        'od.__delitem__(y) <==> del od[y]'
        # Deleting an existing item uses self.__map to find the link which gets
        # removed by updating the links in the predecessor and successor nodes.
        dict_delitem(self, key)
        link_prev, link_next, _ = self.__map.pop(key)
        link_prev[1] = link_next  # update link_prev[NEXT]
        link_next[0] = link_prev  # update link_next[PREV]

    def __iter__(self):
        'od.__iter__() <==> iter(od)'
        # Traverse the linked list in order.
        root = self.__root
        curr = root[1]  # start at the first node
        while curr is not root:
            yield curr[2]  # yield the curr[KEY]
            curr = curr[1]  # move to next node

    def __reversed__(self):
        'od.__reversed__() <==> reversed(od)'
        # Traverse the linked list in reverse order.
        root = self.__root
        curr = root[0]  # start at the last node
        while curr is not root:
            yield curr[2]  # yield the curr[KEY]
            curr = curr[0]  # move to previous node

    def clear(self):
        'od.clear() -> None.  Remove all items from od.'
        root = self.__root
        root[:] = [root, root, None]
        self.__map.clear()
        dict.clear(self)

    # -- the following methods do not depend on the internal structure --

    def keys(self):
        'od.keys() -> list of keys in od'
        return list(self)

    def values(self):
        'od.values() -> list of values in od'
        return [self[key] for key in self]

    def items(self):
        'od.items() -> list of (key, value) pairs in od'
        return [(key, self[key]) for key in self]

    def iterkeys(self):
        'od.iterkeys() -> an iterator over the keys in od'
        return iter(self)

    def itervalues(self):
        'od.itervalues() -> an iterator over the values in od'
        for k in self:
            yield self[k]

    def iteritems(self):
        'od.iteritems() -> an iterator over the (key, value) pairs in od'
        for k in self:
            yield (k, self[k])

    update = MutableMapping.update

    __update = update  # let subclasses override update without breaking __init__

    __marker = object()

    def pop(self, key, default=__marker):
        '''od.pop(k[,d]) -> v, remove specified key and return the corresponding
        value.  If key is not found, d is returned if given, otherwise KeyError
        is raised.

        '''
        if key in self:
            result = self[key]
            del self[key]
            return result
        if default is self.__marker:
            raise KeyError(key)
        return default

    def setdefault(self, key, default=None):
        'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
        if key in self:
            return self[key]
        self[key] = default
        return default

    def popitem(self, last=True):
        '''od.popitem() -> (k, v), return and remove a (key, value) pair.
        Pairs are returned in LIFO order if last is true or FIFO order if false.

        '''
        if not self:
            raise KeyError('dictionary is empty')
        key = next(reversed(self) if last else iter(self))
        value = self.pop(key)
        return key, value

    def __repr__(self, _repr_running={}):
        'od.__repr__() <==> repr(od)'
        call_key = id(self), _get_ident()
        if call_key in _repr_running:
            return '...'
        _repr_running[call_key] = 1
        try:
            if not self:
                return '%s()' % (self.__class__.__name__,)
            return '%s(%r)' % (self.__class__.__name__, self.items())
        finally:
            del _repr_running[call_key]

    def __reduce__(self):
        'Return state information for pickling'
        items = [[k, self[k]] for k in self]
        inst_dict = vars(self).copy()
        for k in vars(OrderedDict()):
            inst_dict.pop(k, None)
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)

    def copy(self):
        'od.copy() -> a shallow copy of od'
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S.
        If not specified, the value defaults to None.

        '''
        self = cls()
        for key in iterable:
            self[key] = value
        return self

    def __eq__(self, other):
        '''od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
        while comparison to a regular mapping is order-insensitive.

        '''
        if isinstance(other, OrderedDict):
            return dict.__eq__(self, other) and all(_imap(_eq, self, other))
        return dict.__eq__(self, other)

    def __ne__(self, other):
        'od.__ne__(y) <==> od!=y'
        return not self == other

    # -- the following methods support python 3.x style dictionary views --

    def viewkeys(self):
        "od.viewkeys() -> a set-like object providing a view on od's keys"
        return KeysView(self)

    def viewvalues(self):
        "od.viewvalues() -> an object providing a view on od's values"
        return ValuesView(self)

    def viewitems(self):
        "od.viewitems() -> a set-like object providing a view on od's items"
        return ItemsView(self)
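
# A quick illustration of the ordering guarantee:
#   >>> od = OrderedDict([('pear', 1), ('apple', 2)])
#   >>> od['banana'] = 3
#   >>> od.keys()
#   ['pear', 'apple', 'banana']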


################################################################################
### namedtuple
################################################################################

_class_template = '''\
class {typename}(tuple):
    '{typename}({arg_list})'

    __slots__ = ()

    _fields = {field_names!r}

    def __new__(_cls, {arg_list}):
        'Create new instance of {typename}({arg_list})'
        return _tuple.__new__(_cls, ({arg_list}))

    @classmethod
    def _make(cls, iterable, new=tuple.__new__, len=len):
        'Make a new {typename} object from a sequence or iterable'
        result = new(cls, iterable)
        if len(result) != {num_fields:d}:
            raise TypeError('Expected {num_fields:d} arguments, got %d' % len(result))
        return result

    def __repr__(self):
        'Return a nicely formatted representation string'
        return '{typename}({repr_fmt})' % self

    def _asdict(self):
        'Return a new OrderedDict which maps field names to their values'
        return OrderedDict(zip(self._fields, self))

    def _replace(_self, **kwds):
        'Return a new {typename} object replacing specified fields with new values'
        result = _self._make(map(kwds.pop, {field_names!r}, _self))
        if kwds:
            raise ValueError('Got unexpected field names: %r' % kwds.keys())
        return result

    def __getnewargs__(self):
        'Return self as a plain tuple.  Used by copy and pickle.'
        return tuple(self)

    __dict__ = _property(_asdict)

    def __getstate__(self):
        'Exclude the OrderedDict from pickling'
        pass

{field_defs}
'''

_repr_template = '{name}=%r'

_field_template = '''\
    {name} = _property(_itemgetter({index:d}), doc='Alias for field number {index:d}')
'''
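
# For instance, namedtuple('Point', 'x y') expands _field_template twice,
# adding
#     x = _property(_itemgetter(0), doc='Alias for field number 0')
#     y = _property(_itemgetter(1), doc='Alias for field number 1')
# to the generated class body.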
|
||||
|
||||
def namedtuple(typename, field_names, verbose=False, rename=False):
|
||||
"""Returns a new subclass of tuple with named fields.
|
||||
|
||||
>>> Point = namedtuple('Point', ['x', 'y'])
|
||||
>>> Point.__doc__ # docstring for the new class
|
||||
'Point(x, y)'
|
||||
>>> p = Point(11, y=22) # instantiate with positional args or keywords
|
||||
>>> p[0] + p[1] # indexable like a plain tuple
|
||||
33
|
||||
>>> x, y = p # unpack like a regular tuple
|
||||
>>> x, y
|
||||
(11, 22)
|
||||
>>> p.x + p.y # fields also accessable by name
|
||||
33
|
||||
>>> d = p._asdict() # convert to a dictionary
|
||||
>>> d['x']
|
||||
11
|
||||
>>> Point(**d) # convert from a dictionary
|
||||
Point(x=11, y=22)
|
||||
>>> p._replace(x=100) # _replace() is like str.replace() but targets named fields
|
||||
Point(x=100, y=22)
|
||||
|
||||
"""
|
||||
|
||||
# Validate the field names. At the user's option, either generate an error
|
||||
# message or automatically replace the field name with a valid name.
|
||||
if isinstance(field_names, basestring):
|
||||
field_names = field_names.replace(',', ' ').split()
|
||||
field_names = map(str, field_names)
|
||||
if rename:
|
||||
seen = set()
|
||||
for index, name in enumerate(field_names):
|
||||
if (not all(c.isalnum() or c=='_' for c in name)
|
||||
or _iskeyword(name)
|
||||
or not name
|
||||
or name[0].isdigit()
|
||||
or name.startswith('_')
|
||||
or name in seen):
|
||||
field_names[index] = '_%d' % index
|
||||
seen.add(name)
|
||||
for name in [typename] + field_names:
|
||||
if not all(c.isalnum() or c=='_' for c in name):
|
||||
raise ValueError('Type names and field names can only contain '
|
||||
'alphanumeric characters and underscores: %r' % name)
|
||||
if _iskeyword(name):
|
||||
raise ValueError('Type names and field names cannot be a '
|
||||
'keyword: %r' % name)
|
||||
if name[0].isdigit():
|
||||
raise ValueError('Type names and field names cannot start with '
|
||||
'a number: %r' % name)
|
||||
seen = set()
|
||||
for name in field_names:
|
||||
if name.startswith('_') and not rename:
|
||||
raise ValueError('Field names cannot start with an underscore: '
|
||||
'%r' % name)
|
||||
if name in seen:
|
||||
raise ValueError('Encountered duplicate field name: %r' % name)
|
||||
seen.add(name)
|
||||
|
||||
# Fill-in the class template
|
||||
class_definition = _class_template.format(
|
||||
typename = typename,
|
||||
field_names = tuple(field_names),
|
||||
num_fields = len(field_names),
|
||||
arg_list = repr(tuple(field_names)).replace("'", "")[1:-1],
|
||||
repr_fmt = ', '.join(_repr_template.format(name=name)
|
||||
for name in field_names),
|
||||
field_defs = '\n'.join(_field_template.format(index=index, name=name)
|
||||
for index, name in enumerate(field_names))
|
||||
)
|
||||
if verbose:
|
||||
print class_definition
|
||||
|
||||
# Execute the template string in a temporary namespace and support
|
||||
# tracing utilities by setting a value for frame.f_globals['__name__']
|
||||
namespace = dict(_itemgetter=_itemgetter, __name__='namedtuple_%s' % typename,
|
||||
OrderedDict=OrderedDict, _property=property, _tuple=tuple)
|
||||
try:
|
||||
exec class_definition in namespace
|
||||
except SyntaxError as e:
|
||||
raise SyntaxError(e.message + ':\n' + class_definition)
|
||||
result = namespace[typename]
|
||||
|
||||
# For pickling to work, the __module__ variable needs to be set to the frame
|
||||
# where the named tuple is created. Bypass this step in environments where
|
||||
# sys._getframe is not defined (Jython for example) or sys._getframe is not
|
||||
# defined for arguments greater than 0 (IronPython).
|
||||
try:
|
||||
result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__')
|
||||
except (AttributeError, ValueError):
|
||||
pass
|
||||
|
||||
return result
|
||||
|
||||
|
||||
########################################################################
|
||||
### Counter
|
||||
########################################################################
|
||||
|
||||
class Counter(dict):
|
||||
'''Dict subclass for counting hashable items. Sometimes called a bag
|
||||
or multiset. Elements are stored as dictionary keys and their counts
|
||||
are stored as dictionary values.
|
||||
|
||||
>>> c = Counter('abcdeabcdabcaba') # count elements from a string
|
||||
|
||||
>>> c.most_common(3) # three most common elements
|
||||
[('a', 5), ('b', 4), ('c', 3)]
|
||||
>>> sorted(c) # list all unique elements
|
||||
['a', 'b', 'c', 'd', 'e']
|
||||
>>> ''.join(sorted(c.elements())) # list elements with repetitions
|
||||
'aaaaabbbbcccdde'
|
||||
>>> sum(c.values()) # total of all counts
|
||||
15
|
||||
|
||||
>>> c['a'] # count of letter 'a'
|
||||
5
|
||||
>>> for elem in 'shazam': # update counts from an iterable
|
||||
... c[elem] += 1 # by adding 1 to each element's count
|
||||
>>> c['a'] # now there are seven 'a'
|
||||
7
|
||||
>>> del c['b'] # remove all 'b'
|
||||
>>> c['b'] # now there are zero 'b'
|
||||
0
|
||||
|
||||
>>> d = Counter('simsalabim') # make another counter
|
||||
>>> c.update(d) # add in the second counter
|
||||
>>> c['a'] # now there are nine 'a'
|
||||
9
|
||||
|
||||
>>> c.clear() # empty the counter
|
||||
>>> c
|
||||
Counter()
|
||||
|
||||
Note: If a count is set to zero or reduced to zero, it will remain
|
||||
in the counter until the entry is deleted or the counter is cleared:
|
||||
|
||||
>>> c = Counter('aaabbc')
|
||||
>>> c['b'] -= 2 # reduce the count of 'b' by two
|
||||
>>> c.most_common() # 'b' is still in, but its count is zero
|
||||
[('a', 3), ('c', 1), ('b', 0)]
|
||||
|
||||
'''
|
||||
# References:
|
||||
# http://en.wikipedia.org/wiki/Multiset
|
||||
# http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html
|
||||
# http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm
|
||||
# http://code.activestate.com/recipes/259174/
|
||||
# Knuth, TAOCP Vol. II section 4.6.3
|
||||
|
||||
def __init__(self, iterable=None, **kwds):
|
||||
'''Create a new, empty Counter object. And if given, count elements
|
||||
from an input iterable. Or, initialize the count from another mapping
|
||||
of elements to their counts.
|
||||
|
||||
>>> c = Counter() # a new, empty counter
|
||||
>>> c = Counter('gallahad') # a new counter from an iterable
|
||||
>>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping
|
||||
>>> c = Counter(a=4, b=2) # a new counter from keyword args
|
||||
|
||||
'''
|
||||
super(Counter, self).__init__()
|
||||
self.update(iterable, **kwds)
|
||||
|
||||
def __missing__(self, key):
|
||||
'The count of elements not in the Counter is zero.'
|
||||
# Needed so that self[missing_item] does not raise KeyError
|
||||
return 0
|
||||
|
||||
def most_common(self, n=None):
|
||||
'''List the n most common elements and their counts from the most
|
||||
common to the least. If n is None, then list all element counts.
|
||||
|
||||
>>> Counter('abcdeabcdabcaba').most_common(3)
|
||||
[('a', 5), ('b', 4), ('c', 3)]
|
||||
|
||||
'''
|
||||
# Emulate Bag.sortedByCount from Smalltalk
|
||||
if n is None:
|
||||
return sorted(self.iteritems(), key=_itemgetter(1), reverse=True)
|
||||
return _heapq.nlargest(n, self.iteritems(), key=_itemgetter(1))
|
||||
|
||||
def elements(self):
|
||||
'''Iterator over elements repeating each as many times as its count.
|
||||
|
||||
>>> c = Counter('ABCABC')
|
||||
>>> sorted(c.elements())
|
||||
['A', 'A', 'B', 'B', 'C', 'C']
|
||||
|
||||
# Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1
|
||||
>>> prime_factors = Counter({2: 2, 3: 3, 17: 1})
|
||||
>>> product = 1
|
||||
>>> for factor in prime_factors.elements(): # loop over factors
|
||||
... product *= factor # and multiply them
|
||||
>>> product
|
||||
1836
|
||||
|
||||
Note, if an element's count has been set to zero or is a negative
|
||||
number, elements() will ignore it.
|
||||
|
||||
'''
|
||||
# Emulate Bag.do from Smalltalk and Multiset.begin from C++.
|
||||
return _chain.from_iterable(_starmap(_repeat, self.iteritems()))
|
||||
|
||||
# Override dict methods where necessary
|
||||
|
||||
@classmethod
|
||||
def fromkeys(cls, iterable, v=None):
|
||||
# There is no equivalent method for counters because setting v=1
|
||||
# means that no element can have a count greater than one.
|
||||
raise NotImplementedError(
|
||||
'Counter.fromkeys() is undefined. Use Counter(iterable) instead.')
|
||||
|
||||
def update(self, iterable=None, **kwds):
|
||||
'''Like dict.update() but add counts instead of replacing them.
|
||||
|
||||
Source can be an iterable, a dictionary, or another Counter instance.
|
||||
|
||||
>>> c = Counter('which')
|
||||
>>> c.update('witch') # add elements from another iterable
|
||||
>>> d = Counter('watch')
|
||||
>>> c.update(d) # add elements from another counter
|
||||
>>> c['h'] # four 'h' in which, witch, and watch
|
||||
4
|
||||
|
||||
'''
|
||||
# The regular dict.update() operation makes no sense here because the
|
||||
# replace behavior results in the some of original untouched counts
|
||||
# being mixed-in with all of the other counts for a mismash that
|
||||
# doesn't have a straight-forward interpretation in most counting
|
||||
# contexts. Instead, we implement straight-addition. Both the inputs
|
||||
# and outputs are allowed to contain zero and negative counts.
|
||||
|
||||
if iterable is not None:
|
||||
if isinstance(iterable, Mapping):
|
||||
if self:
|
||||
self_get = self.get
|
||||
for elem, count in iterable.iteritems():
|
||||
self[elem] = self_get(elem, 0) + count
|
||||
else:
|
||||
super(Counter, self).update(iterable) # fast path when counter is empty
|
||||
else:
|
||||
self_get = self.get
|
||||
for elem in iterable:
|
||||
self[elem] = self_get(elem, 0) + 1
|
||||
if kwds:
|
||||
self.update(kwds)
|
||||
|
||||
def subtract(self, iterable=None, **kwds):
|
||||
'''Like dict.update() but subtracts counts instead of replacing them.
|
||||
Counts can be reduced below zero. Both the inputs and outputs are
|
||||
allowed to contain zero and negative counts.
|
||||
|
||||
Source can be an iterable, a dictionary, or another Counter instance.
|
||||
|
||||
>>> c = Counter('which')
|
||||
>>> c.subtract('witch') # subtract elements from another iterable
|
||||
>>> c.subtract(Counter('watch')) # subtract elements from another counter
|
||||
>>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch
|
||||
0
|
||||
>>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch
|
||||
-1
|
||||
|
||||
'''
|
||||
if iterable is not None:
|
||||
self_get = self.get
|
||||
if isinstance(iterable, Mapping):
|
||||
for elem, count in iterable.items():
|
||||
self[elem] = self_get(elem, 0) - count
|
||||
else:
|
||||
for elem in iterable:
|
||||
self[elem] = self_get(elem, 0) - 1
|
||||
if kwds:
|
||||
self.subtract(kwds)
|
||||
|
||||
    def copy(self):
        'Return a shallow copy.'
        return self.__class__(self)

    def __reduce__(self):
        return self.__class__, (dict(self),)

    def __delitem__(self, elem):
        'Like dict.__delitem__() but does not raise KeyError for missing values.'
        if elem in self:
            super(Counter, self).__delitem__(elem)

    def __repr__(self):
        if not self:
            return '%s()' % self.__class__.__name__
        items = ', '.join(map('%r: %r'.__mod__, self.most_common()))
        return '%s({%s})' % (self.__class__.__name__, items)

    # Multiset-style mathematical operations discussed in:
    #       Knuth TAOCP Volume II section 4.6.3 exercise 19
    #       and at http://en.wikipedia.org/wiki/Multiset
    #
    # Outputs are guaranteed to include only positive counts.
    #
    # To strip negative and zero counts, add-in an empty counter:
    #       c += Counter()

    def __add__(self, other):
        '''Add counts from two counters.

        >>> Counter('abbb') + Counter('bcc')
        Counter({'b': 4, 'c': 2, 'a': 1})

        '''
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            newcount = count + other[elem]
            if newcount > 0:
                result[elem] = newcount
        for elem, count in other.items():
            if elem not in self and count > 0:
                result[elem] = count
        return result

    def __sub__(self, other):
        '''Subtract counts, but keep only results with positive counts.

        >>> Counter('abbbc') - Counter('bccd')
        Counter({'b': 2, 'a': 1})

        '''
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            newcount = count - other[elem]
            if newcount > 0:
                result[elem] = newcount
        for elem, count in other.items():
            if elem not in self and count < 0:
                result[elem] = 0 - count
        return result

    def __or__(self, other):
        '''Union is the maximum of the values in either of the input counters.

        >>> Counter('abbb') | Counter('bcc')
        Counter({'b': 3, 'c': 2, 'a': 1})

        '''
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            other_count = other[elem]
            newcount = other_count if count < other_count else count
            if newcount > 0:
                result[elem] = newcount
        for elem, count in other.items():
            if elem not in self and count > 0:
                result[elem] = count
        return result

    def __and__(self, other):
        '''Intersection is the minimum of corresponding counts.

        >>> Counter('abbb') & Counter('bcc')
        Counter({'b': 1})

        '''
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            other_count = other[elem]
            newcount = count if count < other_count else other_count
            if newcount > 0:
                result[elem] = newcount
        return result

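The four multiset operators compose as the docstrings promise, and the `c += Counter()` idiom from the comment block is the standard way to drop nonpositive counts (Python 3 sketch):

    from collections import Counter

    a, b = Counter('abbb'), Counter('bcc')
    assert a + b == Counter({'a': 1, 'b': 4, 'c': 2})
    assert a - b == Counter({'a': 1, 'b': 2})      # never negative
    assert (a | b) == Counter({'a': 1, 'b': 3, 'c': 2})
    assert (a & b) == Counter({'b': 1})

    d = Counter(x=2, y=-4)
    d += Counter()                                 # strips zero and negative counts
    assert d == Counter(x=2)
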
if __name__ == '__main__':
    # verify that instances can be pickled
    from cPickle import loads, dumps
    Point = namedtuple('Point', 'x, y', True)
    p = Point(x=10, y=20)
    assert p == loads(dumps(p))

    # test and demonstrate ability to override methods
    class Point(namedtuple('Point', 'x y')):
        __slots__ = ()
        @property
        def hypot(self):
            return (self.x ** 2 + self.y ** 2) ** 0.5
        def __str__(self):
            return 'Point: x=%6.3f y=%6.3f hypot=%6.3f' % (self.x, self.y, self.hypot)

    for p in Point(3, 4), Point(14, 5/7.):
        print p

    class Point(namedtuple('Point', 'x y')):
        'Point class with optimized _make() and _replace() without error-checking'
        __slots__ = ()
        _make = classmethod(tuple.__new__)
        def _replace(self, _map=map, **kwds):
            return self._make(_map(kwds.get, ('x', 'y'), self))

    print Point(11, 22)._replace(x=100)

    Point3D = namedtuple('Point3D', Point._fields + ('z',))
    print Point3D.__doc__

    import doctest
    TestResults = namedtuple('TestResults', 'failed attempted')
    print TestResults(*doctest.testmod())
@ -1,90 +0,0 @@
"""Execute shell commands via os.popen() and return status, output.

Interface summary:

       import commands

       outtext = commands.getoutput(cmd)
       (exitstatus, outtext) = commands.getstatusoutput(cmd)
       outtext = commands.getstatus(file)  # returns output of "ls -ld file"

A trailing newline is removed from the output string.

Encapsulates the basic operation:

      pipe = os.popen('{ ' + cmd + '; } 2>&1', 'r')
      text = pipe.read()
      sts = pipe.close()

 [Note:  it would be nice to add functions to interpret the exit status.]
"""
from warnings import warnpy3k
warnpy3k("the commands module has been removed in Python 3.0; "
         "use the subprocess module instead", stacklevel=2)
del warnpy3k

__all__ = ["getstatusoutput", "getoutput", "getstatus"]

# Module 'commands'
#
# Various tools for executing commands and looking at their output and status.
#
# NB This only works (and is only relevant) for UNIX.


# Get 'ls -l' status for an object into a string
#
def getstatus(file):
    """Return output of "ls -ld <file>" in a string."""
    import warnings
    warnings.warn("commands.getstatus() is deprecated", DeprecationWarning, 2)
    return getoutput('ls -ld' + mkarg(file))


# Get the output from a shell command into a string.
# The exit status is ignored; a trailing newline is stripped.
# Assume the command will work with '{ ... ; } 2>&1' around it.
#
def getoutput(cmd):
    """Return output (stdout or stderr) of executing cmd in a shell."""
    return getstatusoutput(cmd)[1]


# Ditto but preserving the exit status.
# Returns a pair (sts, output)
#
def getstatusoutput(cmd):
    """Return (status, output) of executing cmd in a shell."""
    import os
    pipe = os.popen('{ ' + cmd + '; } 2>&1', 'r')
    text = pipe.read()
    sts = pipe.close()
    if sts is None: sts = 0
    if text[-1:] == '\n': text = text[:-1]
    return sts, text


# Make command argument from directory and pathname (prefix space, add quotes).
#
def mk2arg(head, x):
    import os
    return mkarg(os.path.join(head, x))


# Make a shell command argument from a string.
# Return a string beginning with a space followed by a shell-quoted
# version of the argument.
# Two strategies: enclose in single quotes if it contains none;
# otherwise, enclose in double quotes and prefix quotable characters
# with backslash.
#
def mkarg(x):
    if '\'' not in x:
        return ' \'' + x + '\''
    s = ' "'
    for c in x:
        if c in '\\$"`':
            s = s + '\\'
        s = s + c
    s = s + '"'
    return s
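This module is removed by the commit; Python 3's subprocess provides the same conveniences, and shlex.quote subsumes mkarg(). A rough equivalence sketch (note that on modern 3.x the status from getstatusoutput() is the process exit code, not the raw wait status):

    import subprocess
    import shlex

    status, text = subprocess.getstatusoutput('ls -ld /tmp')  # stderr merged, newline stripped
    text = subprocess.getoutput('echo hello')                 # 'hello'
    arg = shlex.quote("it's here")                            # shell-safe quoting, like mkarg()
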
@ -1,227 +0,0 @@
"""Module/script to byte-compile all .py files to .pyc (or .pyo) files.

When called as a script with arguments, this compiles the directories
given as arguments recursively; the -l option prevents it from
recursing into directories.

Without arguments, it compiles all modules on sys.path, without
recursing into subdirectories.  (Even though it should do so for
packages -- for now, you'll have to deal with packages separately.)

See module py_compile for details of the actual byte-compilation.
"""
import os
import sys
import py_compile
import struct
import imp

__all__ = ["compile_dir", "compile_file", "compile_path"]

def compile_dir(dir, maxlevels=10, ddir=None,
                force=0, rx=None, quiet=0):
    """Byte-compile all modules in the given directory tree.

    Arguments (only dir is required):

    dir:       the directory to byte-compile
    maxlevels: maximum recursion level (default 10)
    ddir:      the directory that will be prepended to the path to the
               file as it is compiled into each byte-code file.
    force:     if 1, force compilation, even if timestamps are up-to-date
    rx:        if given, files whose full path matches this regular
               expression are skipped
    quiet:     if 1, be quiet during compilation
    """
    if not quiet:
        print 'Listing', dir, '...'
    try:
        names = os.listdir(dir)
    except os.error:
        print "Can't list", dir
        names = []
    names.sort()
    success = 1
    for name in names:
        fullname = os.path.join(dir, name)
        if ddir is not None:
            dfile = os.path.join(ddir, name)
        else:
            dfile = None
        if not os.path.isdir(fullname):
            if not compile_file(fullname, ddir, force, rx, quiet):
                success = 0
        elif maxlevels > 0 and \
             name != os.curdir and name != os.pardir and \
             os.path.isdir(fullname) and \
             not os.path.islink(fullname):
            if not compile_dir(fullname, maxlevels - 1, dfile, force, rx,
                               quiet):
                success = 0
    return success

def compile_file(fullname, ddir=None, force=0, rx=None, quiet=0):
    """Byte-compile one file.

    Arguments (only fullname is required):

    fullname:  the file to byte-compile
    ddir:      if given, the directory name compiled in to the
               byte-code file.
    force:     if 1, force compilation, even if timestamps are up-to-date
    rx:        if given, files whose full path matches this regular
               expression are skipped
    quiet:     if 1, be quiet during compilation
    """
    success = 1
    name = os.path.basename(fullname)
    if ddir is not None:
        dfile = os.path.join(ddir, name)
    else:
        dfile = None
    if rx is not None:
        mo = rx.search(fullname)
        if mo:
            return success
    if os.path.isfile(fullname):
        head, tail = name[:-3], name[-3:]
        if tail == '.py':
            if not force:
                try:
                    mtime = int(os.stat(fullname).st_mtime)
                    expect = struct.pack('<4sl', imp.get_magic(), mtime)
                    cfile = fullname + (__debug__ and 'c' or 'o')
                    with open(cfile, 'rb') as chandle:
                        actual = chandle.read(8)
                    if expect == actual:
                        return success
                except IOError:
                    pass
            if not quiet:
                print 'Compiling', fullname, '...'
            try:
                ok = py_compile.compile(fullname, None, dfile, True)
            except py_compile.PyCompileError, err:
                if quiet:
                    print 'Compiling', fullname, '...'
                print err.msg
                success = 0
            except IOError, e:
                print "Sorry", e
                success = 0
            else:
                if ok == 0:
                    success = 0
    return success
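The force-check above compares the first 8 bytes of the .pyc (magic number plus source mtime). For reference, a hypothetical Python 3 analogue of the same freshness test, assuming the 16-byte header layout used since 3.7 (magic, flags, mtime, source size):

    import importlib.util
    import os
    import struct

    def pyc_is_fresh(source_path, pyc_path):
        # Assumed 3.7+ header: 4-byte magic, then three little-endian uint32s.
        with open(pyc_path, 'rb') as f:
            header = f.read(16)
        if len(header) < 16 or header[:4] != importlib.util.MAGIC_NUMBER:
            return False
        _flags, mtime, size = struct.unpack('<III', header[4:16])
        st = os.stat(source_path)
        return mtime == int(st.st_mtime) & 0xFFFFFFFF and size == st.st_size & 0xFFFFFFFF
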

def compile_path(skip_curdir=1, maxlevels=0, force=0, quiet=0):
    """Byte-compile all modules on sys.path.

    Arguments (all optional):

    skip_curdir: if true, skip current directory (default true)
    maxlevels:   max recursion level (default 0)
    force:       as for compile_dir() (default 0)
    quiet:       as for compile_dir() (default 0)
    """
    success = 1
    for dir in sys.path:
        if (not dir or dir == os.curdir) and skip_curdir:
            print 'Skipping current directory'
        else:
            success = success and compile_dir(dir, maxlevels, None,
                                              force, quiet=quiet)
    return success

def expand_args(args, flist):
    """Read names in flist and append them to args."""
    expanded = args[:]
    if flist:
        try:
            if flist == '-':
                fd = sys.stdin
            else:
                fd = open(flist)
            while 1:
                line = fd.readline()
                if not line:
                    break
                expanded.append(line[:-1])
        except IOError:
            print "Error reading file list %s" % flist
            raise
    return expanded

def main():
    """Script main program."""
    import getopt
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'lfqd:x:i:')
    except getopt.error, msg:
        print msg
        print "usage: python compileall.py [-l] [-f] [-q] [-d destdir] " \
              "[-x regexp] [-i list] [directory|file ...]"
        print
        print "arguments: zero or more file and directory names to compile; " \
              "if no arguments given, "
        print "           defaults to the equivalent of -l sys.path"
        print
        print "options:"
        print "-l: don't recurse into subdirectories"
        print "-f: force rebuild even if timestamps are up-to-date"
        print "-q: output only error messages"
        print "-d destdir: directory to prepend to file paths for use in " \
              "compile-time tracebacks and in"
        print "            runtime tracebacks in cases where the source " \
              "file is unavailable"
        print "-x regexp: skip files matching the regular expression regexp; " \
              "the regexp is searched for"
        print "           in the full path of each file considered for " \
              "compilation"
        print "-i file: add all the files and directories listed in file to " \
              "the list considered for"
        print '         compilation; if "-", names are read from stdin'

        sys.exit(2)
    maxlevels = 10
    ddir = None
    force = 0
    quiet = 0
    rx = None
    flist = None
    for o, a in opts:
        if o == '-l': maxlevels = 0
        if o == '-d': ddir = a
        if o == '-f': force = 1
        if o == '-q': quiet = 1
        if o == '-x':
            import re
            rx = re.compile(a)
        if o == '-i': flist = a
    if ddir:
        if len(args) != 1 and not os.path.isdir(args[0]):
            print "-d destdir requires exactly one directory argument"
            sys.exit(2)
    success = 1
    try:
        if args or flist:
            try:
                if flist:
                    args = expand_args(args, flist)
            except IOError:
                success = 0
            if success:
                for arg in args:
                    if os.path.isdir(arg):
                        if not compile_dir(arg, maxlevels, ddir,
                                           force, rx, quiet):
                            success = 0
                    else:
                        if not compile_file(arg, ddir, force, rx, quiet):
                            success = 0
        else:
            success = compile_path()
    except KeyboardInterrupt:
        print "\n[interrupted]"
        success = 0
    return success

if __name__ == '__main__':
    exit_status = int(not main())
    sys.exit(exit_status)
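Python 3 keeps compileall with essentially this interface; minimal usage of the surviving API:

    import compileall
    import re

    # Recursively byte-compile a tree, skipping anything under tests/, quietly.
    ok = compileall.compile_dir('proj', maxlevels=10,
                                rx=re.compile(r'[/\\]tests[/\\]'), quiet=1)
    ok = compileall.compile_file('setup.py') and ok
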
@ -1,31 +0,0 @@
"""Package for parsing and compiling Python source code

There are several functions defined at the top level that are imported
from modules contained in the package.

parse(buf, mode="exec") -> AST
    Converts a string containing Python source code to an abstract
    syntax tree (AST).  The AST is defined in compiler.ast.

parseFile(path) -> AST
    The same as parse(open(path))

walk(ast, visitor, verbose=None)
    Does a pre-order walk over the ast using the visitor instance.
    See compiler.visitor for details.

compile(source, filename, mode, flags=None, dont_inherit=None)
    Returns a code object.  A replacement for the builtin compile() function.

compileFile(filename)
    Generates a .pyc file by compiling filename.
"""

import warnings

warnings.warn("The compiler package is deprecated and removed in Python 3.x.",
              DeprecationWarning, stacklevel=2)

from compiler.transformer import parse, parseFile
from compiler.visitor import walk
from compiler.pycodegen import compile, compileFile
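The Python 3 replacement for this package is the built-in ast module; a sketch of the rough equivalences:

    import ast

    tree = ast.parse('x = 1 + 2', mode='exec')    # compiler.parse(buf)
    print(ast.dump(tree))

    class NamePrinter(ast.NodeVisitor):           # compiler.walk(tree, visitor)
        def visit_Name(self, node):
            print(node.id)
            self.generic_visit(node)

    NamePrinter().visit(tree)
    code = compile(tree, '<string>', 'exec')      # compiler.compile(...) -> code object
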
File diff suppressed because it is too large
@ -1,23 +0,0 @@
# operation flags
OP_ASSIGN = 'OP_ASSIGN'
OP_DELETE = 'OP_DELETE'
OP_APPLY = 'OP_APPLY'

SC_LOCAL = 1
SC_GLOBAL_IMPLICIT = 2
SC_GLOBAL_EXPLICIT = 3
SC_FREE = 4
SC_CELL = 5
SC_UNKNOWN = 6

CO_OPTIMIZED = 0x0001
CO_NEWLOCALS = 0x0002
CO_VARARGS = 0x0004
CO_VARKEYWORDS = 0x0008
CO_NESTED = 0x0010
CO_GENERATOR = 0x0020
CO_GENERATOR_ALLOWED = 0
CO_FUTURE_DIVISION = 0x2000
CO_FUTURE_ABSIMPORT = 0x4000
CO_FUTURE_WITH_STATEMENT = 0x8000
CO_FUTURE_PRINT_FUNCTION = 0x10000
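These CO_* bit flags survive in CPython; a quick Python 3 check against a live code object, via the constants inspect re-exports:

    import inspect

    def f(*args, **kwargs):
        yield  # makes it a generator as well

    flags = f.__code__.co_flags
    assert flags & inspect.CO_VARARGS
    assert flags & inspect.CO_VARKEYWORDS
    assert flags & inspect.CO_GENERATOR
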
@ -1,74 +0,0 @@
"""Parser for future statements"""

from compiler import ast, walk

def is_future(stmt):
    """Return true if statement is a well-formed future statement"""
    if not isinstance(stmt, ast.From):
        return 0
    if stmt.modname == "__future__":
        return 1
    else:
        return 0

class FutureParser:

    features = ("nested_scopes", "generators", "division",
                "absolute_import", "with_statement", "print_function",
                "unicode_literals")

    def __init__(self):
        self.found = {}  # set

    def visitModule(self, node):
        stmt = node.node
        for s in stmt.nodes:
            if not self.check_stmt(s):
                break

    def check_stmt(self, stmt):
        if is_future(stmt):
            for name, asname in stmt.names:
                if name in self.features:
                    self.found[name] = 1
                else:
                    raise SyntaxError, \
                          "future feature %s is not defined" % name
            stmt.valid_future = 1
            return 1
        return 0

    def get_features(self):
        """Return list of features enabled by future statements"""
        return self.found.keys()

class BadFutureParser:
    """Check for invalid future statements"""

    def visitFrom(self, node):
        if hasattr(node, 'valid_future'):
            return
        if node.modname != "__future__":
            return
        raise SyntaxError, "invalid future statement " + repr(node)

def find_futures(node):
    p1 = FutureParser()
    p2 = BadFutureParser()
    walk(node, p1)
    walk(node, p2)
    return p1.get_features()

if __name__ == "__main__":
    import sys
    from compiler import parseFile, walk

    for file in sys.argv[1:]:
        print file
        tree = parseFile(file)
        v = FutureParser()
        walk(tree, v)
        print v.found
        print
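The same well-formedness check is straightforward with the Python 3 ast module; a sketch mirroring is_future() and the feature lookup, using __future__.all_feature_names as the feature list:

    import ast
    import __future__

    def find_futures(source):
        found = []
        for stmt in ast.parse(source).body:
            if isinstance(stmt, ast.ImportFrom) and stmt.module == '__future__':
                for alias in stmt.names:
                    if alias.name not in __future__.all_feature_names:
                        raise SyntaxError('future feature %s is not defined' % alias.name)
                    found.append(alias.name)
        return found

    assert find_futures('from __future__ import annotations\nx = 1') == ['annotations']
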
@ -1,73 +0,0 @@

def flatten(tup):
    elts = []
    for elt in tup:
        if isinstance(elt, tuple):
            elts = elts + flatten(elt)
        else:
            elts.append(elt)
    return elts

class Set:
    def __init__(self):
        self.elts = {}
    def __len__(self):
        return len(self.elts)
    def __contains__(self, elt):
        return elt in self.elts
    def add(self, elt):
        self.elts[elt] = elt
    def elements(self):
        return self.elts.keys()
    def has_elt(self, elt):
        return elt in self.elts
    def remove(self, elt):
        del self.elts[elt]
    def copy(self):
        c = Set()
        c.elts.update(self.elts)
        return c

class Stack:
    def __init__(self):
        self.stack = []
        self.pop = self.stack.pop
    def __len__(self):
        return len(self.stack)
    def push(self, elt):
        self.stack.append(elt)
    def top(self):
        return self.stack[-1]
    def __getitem__(self, index): # needed by visitContinue()
        return self.stack[index]

MANGLE_LEN = 256 # magic constant from compile.c

def mangle(name, klass):
    if not name.startswith('__'):
        return name
    if len(name) + 2 >= MANGLE_LEN:
        return name
    if name.endswith('__'):
        return name
    try:
        i = 0
        while klass[i] == '_':
            i = i + 1
    except IndexError:
        return name
    klass = klass[i:]

    tlen = len(klass) + len(name)
    if tlen > MANGLE_LEN:
        klass = klass[:MANGLE_LEN-tlen]

    return "_%s%s" % (klass, name)

def set_filename(filename, tree):
    """Set the filename attribute to filename on every node in tree"""
    worklist = [tree]
    while worklist:
        node = worklist.pop(0)
        node.filename = filename
        worklist.extend(node.getChildNodes())
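mangle() mirrors CPython's private-name mangling; its behavior in doctest form:

    >>> mangle('__x', 'Spam')       # class name is prepended
    '_Spam__x'
    >>> mangle('__x', '__Spam')     # leading underscores of the class are stripped
    '_Spam__x'
    >>> mangle('__x__', 'Spam')     # dunder names are left alone
    '__x__'
    >>> mangle('x', 'Spam')         # only names starting with __ are mangled
    'x'
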
@ -1,763 +0,0 @@
"""A flow graph representation for Python bytecode"""

import dis
import types
import sys

from compiler import misc
from compiler.consts \
     import CO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, CO_VARKEYWORDS

class FlowGraph:
    def __init__(self):
        self.current = self.entry = Block()
        self.exit = Block("exit")
        self.blocks = misc.Set()
        self.blocks.add(self.entry)
        self.blocks.add(self.exit)

    def startBlock(self, block):
        if self._debug:
            if self.current:
                print "end", repr(self.current)
                print "    next", self.current.next
                print "    prev", self.current.prev
                print "   ", self.current.get_children()
            print repr(block)
        self.current = block

    def nextBlock(self, block=None):
        # XXX think we need to specify when there is implicit transfer
        # from one block to the next.  might be better to represent this
        # with explicit JUMP_ABSOLUTE instructions that are optimized
        # out when they are unnecessary.
        #
        # I think this strategy works: each block has a child
        # designated as "next" which is returned as the last of the
        # children.  because the nodes in a graph are emitted in
        # reverse post order, the "next" block will always be emitted
        # immediately after its parent.
        # Worry: maintaining this invariant could be tricky
        if block is None:
            block = self.newBlock()

        # Note: If the current block ends with an unconditional control
        # transfer, then it is technically incorrect to add an implicit
        # transfer to the block graph. Doing so results in code generation
        # for unreachable blocks.  That doesn't appear to be very common
        # with Python code and since the built-in compiler doesn't optimize
        # it out we don't either.
        self.current.addNext(block)
        self.startBlock(block)

    def newBlock(self):
        b = Block()
        self.blocks.add(b)
        return b

    def startExitBlock(self):
        self.startBlock(self.exit)

    _debug = 0

    def _enable_debug(self):
        self._debug = 1

    def _disable_debug(self):
        self._debug = 0

    def emit(self, *inst):
        if self._debug:
            print "\t", inst
        if len(inst) == 2 and isinstance(inst[1], Block):
            self.current.addOutEdge(inst[1])
        self.current.emit(inst)

    def getBlocksInOrder(self):
        """Return the blocks in reverse postorder

        i.e. each node appears before all of its successors
        """
        order = order_blocks(self.entry, self.exit)
        return order

    def getBlocks(self):
        return self.blocks.elements()

    def getRoot(self):
        """Return nodes appropriate for use with dominator"""
        return self.entry

    def getContainedGraphs(self):
        l = []
        for b in self.getBlocks():
            l.extend(b.getContainedGraphs())
        return l

def order_blocks(start_block, exit_block):
    """Order blocks so that they are emitted in the right order"""
    # Rules:
    # - when a block has a next block, the next block must be emitted just after
    # - when a block has followers (relative jumps), it must be emitted before
    #   them
    # - all reachable blocks must be emitted
    order = []

    # Find all the blocks to be emitted.
    remaining = set()
    todo = [start_block]
    while todo:
        b = todo.pop()
        if b in remaining:
            continue
        remaining.add(b)
        for c in b.get_children():
            if c not in remaining:
                todo.append(c)

    # A block is dominated by another block if that block must be emitted
    # before it.
    dominators = {}
    for b in remaining:
        if __debug__ and b.next:
            assert b is b.next[0].prev[0], (b, b.next)
        # Make sure every block appears in dominators, even if no
        # other block must precede it.
        dominators.setdefault(b, set())
        # preceding blocks dominate following blocks
        for c in b.get_followers():
            while 1:
                dominators.setdefault(c, set()).add(b)
                # Any block that has a next pointer leading to c is also
                # dominated because the whole chain will be emitted at once.
                # Walk backwards and add them all.
                if c.prev and c.prev[0] is not b:
                    c = c.prev[0]
                else:
                    break

    def find_next():
        # Find a block that can be emitted next.
        for b in remaining:
            for c in dominators[b]:
                if c in remaining:
                    break # can't emit yet, dominated by a remaining block
            else:
                return b
        assert 0, 'circular dependency, cannot find next block'

    b = start_block
    while 1:
        order.append(b)
        remaining.discard(b)
        if b.next:
            b = b.next[0]
            continue
        elif b is not exit_block and not b.has_unconditional_transfer():
            order.append(exit_block)
        if not remaining:
            break
        b = find_next()
    return order

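The three rules in the comment amount to a constrained topological sort. As a standalone illustration, a hypothetical mini-version over plain dicts (ignoring the dominator bookkeeping the real code needs for correctness):

    def order(entry, next_of, followers_of):
        """Emit every reachable block: fall-through chains stay contiguous,
        and a jump source is emitted before its relative-jump targets."""
        seen, out = set(), []

        def emit_chain(b):
            while b is not None and b not in seen:
                seen.add(b)
                out.append(b)
                b = next_of.get(b)      # follow the unique fall-through edge

        emit_chain(entry)
        changed = True
        while changed:                  # then pull in jump targets, sources first
            changed = False
            for src, targets in followers_of.items():
                if src in seen:
                    for t in targets:
                        if t not in seen:
                            emit_chain(t)
                            changed = True
        return out

    assert order('A', {'A': 'B'}, {'A': {'C'}}) == ['A', 'B', 'C']
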
class Block:
    _count = 0

    def __init__(self, label=''):
        self.insts = []
        self.outEdges = set()
        self.label = label
        self.bid = Block._count
        self.next = []
        self.prev = []
        Block._count = Block._count + 1

    def __repr__(self):
        if self.label:
            return "<block %s id=%d>" % (self.label, self.bid)
        else:
            return "<block id=%d>" % (self.bid)

    def __str__(self):
        insts = map(str, self.insts)
        return "<block %s %d:\n%s>" % (self.label, self.bid,
                                       '\n'.join(insts))

    def emit(self, inst):
        op = inst[0]
        self.insts.append(inst)

    def getInstructions(self):
        return self.insts

    def addOutEdge(self, block):
        self.outEdges.add(block)

    def addNext(self, block):
        self.next.append(block)
        assert len(self.next) == 1, map(str, self.next)
        block.prev.append(self)
        assert len(block.prev) == 1, map(str, block.prev)

    _uncond_transfer = ('RETURN_VALUE', 'RAISE_VARARGS',
                        'JUMP_ABSOLUTE', 'JUMP_FORWARD', 'CONTINUE_LOOP',
                        )

    def has_unconditional_transfer(self):
        """Returns True if there is an unconditional transfer to another block
        at the end of this block. This means there is no risk for the bytecode
        interpreter to go past this block's bytecode."""
        try:
            op, arg = self.insts[-1]
        except (IndexError, ValueError):
            return
        return op in self._uncond_transfer

    def get_children(self):
        return list(self.outEdges) + self.next

    def get_followers(self):
        """Get the whole list of followers, including the next block."""
        followers = set(self.next)
        # Blocks that must be emitted *after* this one, because of
        # bytecode offsets (e.g. relative jumps) pointing to them.
        for inst in self.insts:
            if inst[0] in PyFlowGraph.hasjrel:
                followers.add(inst[1])
        return followers

    def getContainedGraphs(self):
        """Return all graphs contained within this block.

        For example, a MAKE_FUNCTION block will contain a reference to
        the graph for the function body.
        """
        contained = []
        for inst in self.insts:
            if len(inst) == 1:
                continue
            op = inst[1]
            if hasattr(op, 'graph'):
                contained.append(op.graph)
        return contained

# flags for code objects

# the FlowGraph is transformed in place; it exists in one of these states
RAW = "RAW"
FLAT = "FLAT"
CONV = "CONV"
DONE = "DONE"

class PyFlowGraph(FlowGraph):
    super_init = FlowGraph.__init__

    def __init__(self, name, filename, args=(), optimized=0, klass=None):
        self.super_init()
        self.name = name
        self.filename = filename
        self.docstring = None
        self.args = args # XXX
        self.argcount = getArgCount(args)
        self.klass = klass
        if optimized:
            self.flags = CO_OPTIMIZED | CO_NEWLOCALS
        else:
            self.flags = 0
        self.consts = []
        self.names = []
        # Free variables found by the symbol table scan, including
        # variables used only in nested scopes, are included here.
        self.freevars = []
        self.cellvars = []
        # The closure list is used to track the order of cell
        # variables and free variables in the resulting code object.
        # The offsets used by LOAD_CLOSURE/LOAD_DEREF refer to both
        # kinds of variables.
        self.closure = []
        self.varnames = list(args) or []
        for i in range(len(self.varnames)):
            var = self.varnames[i]
            if isinstance(var, TupleArg):
                self.varnames[i] = var.getName()
        self.stage = RAW

    def setDocstring(self, doc):
        self.docstring = doc

    def setFlag(self, flag):
        self.flags = self.flags | flag
        if flag == CO_VARARGS:
            self.argcount = self.argcount - 1

    def checkFlag(self, flag):
        if self.flags & flag:
            return 1

    def setFreeVars(self, names):
        self.freevars = list(names)

    def setCellVars(self, names):
        self.cellvars = names

    def getCode(self):
        """Get a Python code object"""
        assert self.stage == RAW
        self.computeStackDepth()
        self.flattenGraph()
        assert self.stage == FLAT
        self.convertArgs()
        assert self.stage == CONV
        self.makeByteCode()
        assert self.stage == DONE
        return self.newCodeObject()

    def dump(self, io=None):
        if io:
            save = sys.stdout
            sys.stdout = io
        pc = 0
        for t in self.insts:
            opname = t[0]
            if opname == "SET_LINENO":
                print
            if len(t) == 1:
                print "\t", "%3d" % pc, opname
                pc = pc + 1
            else:
                print "\t", "%3d" % pc, opname, t[1]
                pc = pc + 3
        if io:
            sys.stdout = save

    def computeStackDepth(self):
        """Compute the max stack depth.

        Approach is to compute the stack effect of each basic block.
        Then find the path through the code with the largest total
        effect.
        """
        depth = {}
        exit = None
        for b in self.getBlocks():
            depth[b] = findDepth(b.getInstructions())

        seen = {}

        def max_depth(b, d):
            if b in seen:
                return d
            seen[b] = 1
            d = d + depth[b]
            children = b.get_children()
            if children:
                return max([max_depth(c, d) for c in children])
            else:
                if not b.label == "exit":
                    return max_depth(self.exit, d)
                else:
                    return d

        self.stacksize = max_depth(self.entry, 0)

    def flattenGraph(self):
        """Arrange the blocks in order and resolve jumps"""
        assert self.stage == RAW
        self.insts = insts = []
        pc = 0
        begin = {}
        end = {}
        for b in self.getBlocksInOrder():
            begin[b] = pc
            for inst in b.getInstructions():
                insts.append(inst)
                if len(inst) == 1:
                    pc = pc + 1
                elif inst[0] != "SET_LINENO":
                    # arg takes 2 bytes
                    pc = pc + 3
            end[b] = pc
        pc = 0
        for i in range(len(insts)):
            inst = insts[i]
            if len(inst) == 1:
                pc = pc + 1
            elif inst[0] != "SET_LINENO":
                pc = pc + 3
            opname = inst[0]
            if opname in self.hasjrel:
                oparg = inst[1]
                offset = begin[oparg] - pc
                insts[i] = opname, offset
            elif opname in self.hasjabs:
                insts[i] = opname, begin[inst[1]]
        self.stage = FLAT

    hasjrel = set()
    for i in dis.hasjrel:
        hasjrel.add(dis.opname[i])
    hasjabs = set()
    for i in dis.hasjabs:
        hasjabs.add(dis.opname[i])

    def convertArgs(self):
        """Convert arguments from symbolic to concrete form"""
        assert self.stage == FLAT
        self.consts.insert(0, self.docstring)
        self.sort_cellvars()
        for i in range(len(self.insts)):
            t = self.insts[i]
            if len(t) == 2:
                opname, oparg = t
                conv = self._converters.get(opname, None)
                if conv:
                    self.insts[i] = opname, conv(self, oparg)
        self.stage = CONV

    def sort_cellvars(self):
        """Sort cellvars in the order of varnames and prune from freevars.
        """
        cells = {}
        for name in self.cellvars:
            cells[name] = 1
        self.cellvars = [name for name in self.varnames
                         if name in cells]
        for name in self.cellvars:
            del cells[name]
        self.cellvars = self.cellvars + cells.keys()
        self.closure = self.cellvars + self.freevars

    def _lookupName(self, name, list):
        """Return index of name in list, appending if necessary

        This routine uses a list instead of a dictionary, because a
        dictionary can't store two different keys if the keys have the
        same value but different types, e.g. 2 and 2L.  The compiler
        must treat these two separately, so it does an explicit type
        comparison before comparing the values.
        """
        t = type(name)
        for i in range(len(list)):
            if t == type(list[i]) and list[i] == name:
                return i
        end = len(list)
        list.append(name)
        return end

    _converters = {}
    def _convert_LOAD_CONST(self, arg):
        if hasattr(arg, 'getCode'):
            arg = arg.getCode()
        return self._lookupName(arg, self.consts)

    def _convert_LOAD_FAST(self, arg):
        self._lookupName(arg, self.names)
        return self._lookupName(arg, self.varnames)
    _convert_STORE_FAST = _convert_LOAD_FAST
    _convert_DELETE_FAST = _convert_LOAD_FAST

    def _convert_LOAD_NAME(self, arg):
        if self.klass is None:
            self._lookupName(arg, self.varnames)
        return self._lookupName(arg, self.names)

    def _convert_NAME(self, arg):
        if self.klass is None:
            self._lookupName(arg, self.varnames)
        return self._lookupName(arg, self.names)
    _convert_STORE_NAME = _convert_NAME
    _convert_DELETE_NAME = _convert_NAME
    _convert_IMPORT_NAME = _convert_NAME
    _convert_IMPORT_FROM = _convert_NAME
    _convert_STORE_ATTR = _convert_NAME
    _convert_LOAD_ATTR = _convert_NAME
    _convert_DELETE_ATTR = _convert_NAME
    _convert_LOAD_GLOBAL = _convert_NAME
    _convert_STORE_GLOBAL = _convert_NAME
    _convert_DELETE_GLOBAL = _convert_NAME

    def _convert_DEREF(self, arg):
        self._lookupName(arg, self.names)
        self._lookupName(arg, self.varnames)
        return self._lookupName(arg, self.closure)
    _convert_LOAD_DEREF = _convert_DEREF
    _convert_STORE_DEREF = _convert_DEREF

    def _convert_LOAD_CLOSURE(self, arg):
        self._lookupName(arg, self.varnames)
        return self._lookupName(arg, self.closure)

    _cmp = list(dis.cmp_op)
    def _convert_COMPARE_OP(self, arg):
        return self._cmp.index(arg)

    # similarly for other opcodes...

    for name, obj in locals().items():
        if name[:9] == "_convert_":
            opname = name[9:]
            _converters[opname] = obj
    del name, obj, opname

    def makeByteCode(self):
        assert self.stage == CONV
        self.lnotab = lnotab = LineAddrTable()
        for t in self.insts:
            opname = t[0]
            if len(t) == 1:
                lnotab.addCode(self.opnum[opname])
            else:
                oparg = t[1]
                if opname == "SET_LINENO":
                    lnotab.nextLine(oparg)
                    continue
                hi, lo = twobyte(oparg)
                try:
                    lnotab.addCode(self.opnum[opname], lo, hi)
                except ValueError:
                    print opname, oparg
                    print self.opnum[opname], lo, hi
                    raise
        self.stage = DONE

    opnum = {}
    for num in range(len(dis.opname)):
        opnum[dis.opname[num]] = num
    del num

    def newCodeObject(self):
        assert self.stage == DONE
        if (self.flags & CO_NEWLOCALS) == 0:
            nlocals = 0
        else:
            nlocals = len(self.varnames)
        argcount = self.argcount
        if self.flags & CO_VARKEYWORDS:
            argcount = argcount - 1
        return types.CodeType(argcount, nlocals, self.stacksize, self.flags,
                              self.lnotab.getCode(), self.getConsts(),
                              tuple(self.names), tuple(self.varnames),
                              self.filename, self.name, self.lnotab.firstline,
                              self.lnotab.getTable(), tuple(self.freevars),
                              tuple(self.cellvars))

    def getConsts(self):
        """Return a tuple for the const slot of the code object

        Must convert references to code (MAKE_FUNCTION) to code
        objects recursively.
        """
        l = []
        for elt in self.consts:
            if isinstance(elt, PyFlowGraph):
                elt = elt.getCode()
            l.append(elt)
        return tuple(l)

def isJump(opname):
    if opname[:4] == 'JUMP':
        return 1

class TupleArg:
    """Helper for marking func defs with nested tuples in arglist"""
    def __init__(self, count, names):
        self.count = count
        self.names = names
    def __repr__(self):
        return "TupleArg(%s, %s)" % (self.count, self.names)
    def getName(self):
        return ".%d" % self.count

def getArgCount(args):
    argcount = len(args)
    if args:
        for arg in args:
            if isinstance(arg, TupleArg):
                numNames = len(misc.flatten(arg.names))
                argcount = argcount - numNames
    return argcount

def twobyte(val):
    """Convert an int argument into high and low bytes"""
    assert isinstance(val, int)
    return divmod(val, 256)

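twobyte() is just divmod by 256, splitting a 16-bit oparg into the high and low bytes the bytecode stream stores:

    >>> twobyte(300)
    (1, 44)
    >>> twobyte(5)
    (0, 5)
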
class LineAddrTable:
    """lnotab

    This class builds the lnotab, which is documented in compile.c.
    Here's a brief recap:

    For each SET_LINENO instruction after the first one, two bytes are
    added to lnotab.  (In some cases, multiple two-byte entries are
    added.)  The first byte is the distance in bytes between the
    instruction for the last SET_LINENO and the current SET_LINENO.
    The second byte is the offset in line numbers.  If either offset is
    greater than 255, multiple two-byte entries are added -- see
    compile.c for the delicate details.
    """

    def __init__(self):
        self.code = []
        self.codeOffset = 0
        self.firstline = 0
        self.lastline = 0
        self.lastoff = 0
        self.lnotab = []

    def addCode(self, *args):
        for arg in args:
            self.code.append(chr(arg))
        self.codeOffset = self.codeOffset + len(args)

    def nextLine(self, lineno):
        if self.firstline == 0:
            self.firstline = lineno
            self.lastline = lineno
        else:
            # compute deltas
            addr = self.codeOffset - self.lastoff
            line = lineno - self.lastline
            # Python assumes that lineno always increases with
            # increasing bytecode address (lnotab is unsigned char).
            # Depending on when SET_LINENO instructions are emitted
            # this is not always true.  Consider the code:
            #     a = (1,
            #          b)
            # In the bytecode stream, the assignment to "a" occurs
            # after the loading of "b".  This works with the C Python
            # compiler because it only generates a SET_LINENO instruction
            # for the assignment.
            if line >= 0:
                push = self.lnotab.append
                while addr > 255:
                    push(255); push(0)
                    addr -= 255
                while line > 255:
                    push(addr); push(255)
                    line -= 255
                    addr = 0
                if addr > 0 or line > 0:
                    push(addr); push(line)
                self.lastline = lineno
                self.lastoff = self.codeOffset

    def getCode(self):
        return ''.join(self.code)

    def getTable(self):
        return ''.join(map(chr, self.lnotab))

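Reading a line table back is easier than building one; in Python 3 the stdlib hides the delta decoding behind dis.findlinestarts(), e.g.:

    import dis

    def f():
        x = 1
        y = 2
        return x + y

    # Yields (bytecode offset, source line) pairs decoded from the line table.
    for offset, lineno in dis.findlinestarts(f.__code__):
        print(offset, lineno)
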
class StackDepthTracker:
    # XXX 1. need to keep track of stack depth on jumps
    # XXX 2. at least partly as a result, this code is broken

    def findDepth(self, insts, debug=0):
        depth = 0
        maxDepth = 0
        for i in insts:
            opname = i[0]
            if debug:
                print i,
            delta = self.effect.get(opname, None)
            if delta is not None:
                depth = depth + delta
            else:
                # now check patterns
                for pat, pat_delta in self.patterns:
                    if opname[:len(pat)] == pat:
                        delta = pat_delta
                        depth = depth + delta
                        break
                # if we still haven't found a match
                if delta is None:
                    meth = getattr(self, opname, None)
                    if meth is not None:
                        depth = depth + meth(i[1])
            if depth > maxDepth:
                maxDepth = depth
            if debug:
                print depth, maxDepth
        return maxDepth

    effect = {
        'POP_TOP': -1,
        'DUP_TOP': 1,
        'LIST_APPEND': -1,
        'SET_ADD': -1,
        'MAP_ADD': -2,
        'SLICE+1': -1,
        'SLICE+2': -1,
        'SLICE+3': -2,
        'STORE_SLICE+0': -1,
        'STORE_SLICE+1': -2,
        'STORE_SLICE+2': -2,
        'STORE_SLICE+3': -3,
        'DELETE_SLICE+0': -1,
        'DELETE_SLICE+1': -2,
        'DELETE_SLICE+2': -2,
        'DELETE_SLICE+3': -3,
        'STORE_SUBSCR': -3,
        'DELETE_SUBSCR': -2,
        # PRINT_EXPR?
        'PRINT_ITEM': -1,
        'RETURN_VALUE': -1,
        'YIELD_VALUE': -1,
        'EXEC_STMT': -3,
        'BUILD_CLASS': -2,
        'STORE_NAME': -1,
        'STORE_ATTR': -2,
        'DELETE_ATTR': -1,
        'STORE_GLOBAL': -1,
        'BUILD_MAP': 1,
        'COMPARE_OP': -1,
        'STORE_FAST': -1,
        'IMPORT_STAR': -1,
        'IMPORT_NAME': -1,
        'IMPORT_FROM': 1,
        'LOAD_ATTR': 0, # unlike other loads
        # close enough...
        'SETUP_EXCEPT': 3,
        'SETUP_FINALLY': 3,
        'FOR_ITER': 1,
        'WITH_CLEANUP': -1,
        }
    # use pattern match
    patterns = [
        ('BINARY_', -1),
        ('LOAD_', 1),
        ]

    def UNPACK_SEQUENCE(self, count):
        return count-1
    def BUILD_TUPLE(self, count):
        return -count+1
    def BUILD_LIST(self, count):
        return -count+1
    def BUILD_SET(self, count):
        return -count+1
    def CALL_FUNCTION(self, argc):
        hi, lo = divmod(argc, 256)
        return -(lo + hi * 2)
    def CALL_FUNCTION_VAR(self, argc):
        return self.CALL_FUNCTION(argc)-1
    def CALL_FUNCTION_KW(self, argc):
        return self.CALL_FUNCTION(argc)-1
    def CALL_FUNCTION_VAR_KW(self, argc):
        return self.CALL_FUNCTION(argc)-2
    def MAKE_FUNCTION(self, argc):
        return -argc
    def MAKE_CLOSURE(self, argc):
        # XXX need to account for free variables too!
        return -argc
    def BUILD_SLICE(self, argc):
        if argc == 2:
            return -1
        elif argc == 3:
            return -2
    def DUP_TOPX(self, argc):
        return argc

findDepth = StackDepthTracker().findDepth
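CPython now publishes exact per-opcode stack effects, which replaces hand-maintained tables like the one above; a Python 3 sketch (opcode names vary a little across 3.x versions):

    import dis

    print(dis.stack_effect(dis.opmap['POP_TOP']))        # -1
    print(dis.stack_effect(dis.opmap['BUILD_LIST'], 3))  # -2: pops three items, pushes one list
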
File diff suppressed because it is too large
@ -1,462 +0,0 @@
"""Module symbol-table generator"""

from compiler import ast
from compiler.consts import SC_LOCAL, SC_GLOBAL_IMPLICIT, SC_GLOBAL_EXPLICIT, \
    SC_FREE, SC_CELL, SC_UNKNOWN
from compiler.misc import mangle
import types


import sys

MANGLE_LEN = 256

class Scope:
    # XXX how much information do I need about each name?
    def __init__(self, name, module, klass=None):
        self.name = name
        self.module = module
        self.defs = {}
        self.uses = {}
        self.globals = {}
        self.params = {}
        self.frees = {}
        self.cells = {}
        self.children = []
        # nested is true if the class could contain free variables,
        # i.e. if it is nested within another function.
        self.nested = None
        self.generator = None
        self.klass = None
        if klass is not None:
            for i in range(len(klass)):
                if klass[i] != '_':
                    self.klass = klass[i:]
                    break

    def __repr__(self):
        return "<%s: %s>" % (self.__class__.__name__, self.name)

    def mangle(self, name):
        if self.klass is None:
            return name
        return mangle(name, self.klass)

    def add_def(self, name):
        self.defs[self.mangle(name)] = 1

    def add_use(self, name):
        self.uses[self.mangle(name)] = 1

    def add_global(self, name):
        name = self.mangle(name)
        if name in self.uses or name in self.defs:
            pass # XXX warn about global following def/use
        if name in self.params:
            raise SyntaxError, "%s in %s is global and parameter" % \
                  (name, self.name)
        self.globals[name] = 1
        self.module.add_def(name)

    def add_param(self, name):
        name = self.mangle(name)
        self.defs[name] = 1
        self.params[name] = 1

    def get_names(self):
        d = {}
        d.update(self.defs)
        d.update(self.uses)
        d.update(self.globals)
        return d.keys()

    def add_child(self, child):
        self.children.append(child)

    def get_children(self):
        return self.children

    def DEBUG(self):
        print >> sys.stderr, self.name, self.nested and "nested" or ""
        print >> sys.stderr, "\tglobals: ", self.globals
        print >> sys.stderr, "\tcells: ", self.cells
        print >> sys.stderr, "\tdefs: ", self.defs
        print >> sys.stderr, "\tuses: ", self.uses
        print >> sys.stderr, "\tfrees:", self.frees

    def check_name(self, name):
        """Return scope of name.

        The scope of a name could be LOCAL, GLOBAL, FREE, or CELL.
        """
        if name in self.globals:
            return SC_GLOBAL_EXPLICIT
        if name in self.cells:
            return SC_CELL
        if name in self.defs:
            return SC_LOCAL
        if self.nested and (name in self.frees or name in self.uses):
            return SC_FREE
        if self.nested:
            return SC_UNKNOWN
        else:
            return SC_GLOBAL_IMPLICIT

    def get_free_vars(self):
        if not self.nested:
            return ()
        free = {}
        free.update(self.frees)
        for name in self.uses.keys():
            if name not in self.defs and name not in self.globals:
                free[name] = 1
        return free.keys()

    def handle_children(self):
        for child in self.children:
            frees = child.get_free_vars()
            globals = self.add_frees(frees)
            for name in globals:
                child.force_global(name)

    def force_global(self, name):
        """Force name to be global in scope.

        Some child of the current node had a free reference to name.
        When the child was processed, it was labelled a free
        variable.  Now that all its enclosing scopes have been
        processed, the name is known to be a global or builtin.  So
        walk back down the child chain and set the name to be global
        rather than free.

        Be careful to stop if a child does not think the name is
        free.
        """
        self.globals[name] = 1
        if name in self.frees:
            del self.frees[name]
        for child in self.children:
            if child.check_name(name) == SC_FREE:
                child.force_global(name)

    def add_frees(self, names):
        """Process list of free vars from nested scope.

        Returns a list of names that are either 1) declared global in the
        parent or 2) undefined in a top-level parent.  In either case,
        the nested scope should treat them as globals.
        """
        child_globals = []
        for name in names:
            sc = self.check_name(name)
            if self.nested:
                if sc == SC_UNKNOWN or sc == SC_FREE \
                   or isinstance(self, ClassScope):
                    self.frees[name] = 1
                elif sc == SC_GLOBAL_IMPLICIT:
                    child_globals.append(name)
                elif isinstance(self, FunctionScope) and sc == SC_LOCAL:
                    self.cells[name] = 1
                elif sc != SC_CELL:
                    child_globals.append(name)
            else:
                if sc == SC_LOCAL:
                    self.cells[name] = 1
                elif sc != SC_CELL:
                    child_globals.append(name)
        return child_globals

    def get_cell_vars(self):
        return self.cells.keys()

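Python 3 exposes the interpreter's own version of this analysis through the symtable module; a sketch classifying the same local/free/cell distinctions:

    import symtable

    code = (
        "def outer():\n"
        "    x = 1\n"
        "    def inner():\n"
        "        return x\n"
        "    return inner\n"
    )
    top = symtable.symtable(code, '<string>', 'exec')
    outer = top.lookup('outer').get_namespace()
    inner = outer.lookup('inner').get_namespace()
    assert outer.lookup('x').is_local()   # bound in outer (and a cell feeding inner)
    assert inner.lookup('x').is_free()    # free in inner
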
class ModuleScope(Scope):
    __super_init = Scope.__init__

    def __init__(self):
        self.__super_init("global", self)

class FunctionScope(Scope):
    pass

class GenExprScope(Scope):
    __super_init = Scope.__init__

    __counter = 1

    def __init__(self, module, klass=None):
        i = self.__counter
        self.__counter += 1
        self.__super_init("generator expression<%d>"%i, module, klass)
        self.add_param('.0')

    def get_names(self):
        keys = Scope.get_names(self)
        return keys

class LambdaScope(FunctionScope):
    __super_init = Scope.__init__

    __counter = 1

    def __init__(self, module, klass=None):
        i = self.__counter
        self.__counter += 1
        self.__super_init("lambda.%d" % i, module, klass)

class ClassScope(Scope):
    __super_init = Scope.__init__

    def __init__(self, name, module):
        self.__super_init(name, module, name)

class SymbolVisitor:
    def __init__(self):
        self.scopes = {}
        self.klass = None

    # nodes that define new scopes

    def visitModule(self, node):
        scope = self.module = self.scopes[node] = ModuleScope()
        self.visit(node.node, scope)

    visitExpression = visitModule

    def visitFunction(self, node, parent):
        if node.decorators:
            self.visit(node.decorators, parent)
        parent.add_def(node.name)
        for n in node.defaults:
            self.visit(n, parent)
        scope = FunctionScope(node.name, self.module, self.klass)
        if parent.nested or isinstance(parent, FunctionScope):
            scope.nested = 1
        self.scopes[node] = scope
        self._do_args(scope, node.argnames)
        self.visit(node.code, scope)
        self.handle_free_vars(scope, parent)

    def visitGenExpr(self, node, parent):
        scope = GenExprScope(self.module, self.klass)
        if parent.nested or isinstance(parent, FunctionScope) \
           or isinstance(parent, GenExprScope):
            scope.nested = 1

        self.scopes[node] = scope
        self.visit(node.code, scope)

        self.handle_free_vars(scope, parent)

    def visitGenExprInner(self, node, scope):
        for genfor in node.quals:
            self.visit(genfor, scope)

        self.visit(node.expr, scope)

    def visitGenExprFor(self, node, scope):
        self.visit(node.assign, scope, 1)
        self.visit(node.iter, scope)
        for if_ in node.ifs:
            self.visit(if_, scope)

    def visitGenExprIf(self, node, scope):
        self.visit(node.test, scope)

    def visitLambda(self, node, parent, assign=0):
        # Lambda is an expression, so it could appear in an expression
        # context where assign is passed.  The transformer should catch
        # any code that has a lambda on the left-hand side.
        assert not assign

        for n in node.defaults:
            self.visit(n, parent)
        scope = LambdaScope(self.module, self.klass)
        if parent.nested or isinstance(parent, FunctionScope):
            scope.nested = 1
        self.scopes[node] = scope
        self._do_args(scope, node.argnames)
        self.visit(node.code, scope)
        self.handle_free_vars(scope, parent)

    def _do_args(self, scope, args):
        for name in args:
            if type(name) == types.TupleType:
                self._do_args(scope, name)
            else:
                scope.add_param(name)

    def handle_free_vars(self, scope, parent):
        parent.add_child(scope)
        scope.handle_children()

    def visitClass(self, node, parent):
        parent.add_def(node.name)
        for n in node.bases:
            self.visit(n, parent)
        scope = ClassScope(node.name, self.module)
        if parent.nested or isinstance(parent, FunctionScope):
            scope.nested = 1
        if node.doc is not None:
            scope.add_def('__doc__')
        scope.add_def('__module__')
        self.scopes[node] = scope
        prev = self.klass
        self.klass = node.name
        self.visit(node.code, scope)
        self.klass = prev
        self.handle_free_vars(scope, parent)

    # name can be a def or a use

    # XXX a few calls and nodes expect a third "assign" arg that is
    # true if the name is being used as an assignment.  only
    # expressions contained within statements may have the assign arg.

    def visitName(self, node, scope, assign=0):
        if assign:
            scope.add_def(node.name)
        else:
            scope.add_use(node.name)

    # operations that bind new names

    def visitFor(self, node, scope):
        self.visit(node.assign, scope, 1)
        self.visit(node.list, scope)
        self.visit(node.body, scope)
        if node.else_:
            self.visit(node.else_, scope)

    def visitFrom(self, node, scope):
        for name, asname in node.names:
            if name == "*":
                continue
            scope.add_def(asname or name)

    def visitImport(self, node, scope):
        for name, asname in node.names:
            i = name.find(".")
            if i > -1:
                name = name[:i]
            scope.add_def(asname or name)

    def visitGlobal(self, node, scope):
        for name in node.names:
            scope.add_global(name)

    def visitAssign(self, node, scope):
        """Propagate assignment flag down to child nodes.

        The Assign node doesn't itself contain the variables being
        assigned to.  Instead, the children in node.nodes are visited
        with the assign flag set to true.  When the names occur in
        those nodes, they are marked as defs.

        Some names that occur in an assignment target are not bound by
        the assignment, e.g. a name occurring inside a slice.  The
        visitor handles these nodes specially; they do not propagate
        the assign flag to their children.
        """
        for n in node.nodes:
            self.visit(n, scope, 1)
        self.visit(node.expr, scope)

    def visitAssName(self, node, scope, assign=1):
        scope.add_def(node.name)

    def visitAssAttr(self, node, scope, assign=0):
        self.visit(node.expr, scope, 0)

    def visitSubscript(self, node, scope, assign=0):
        self.visit(node.expr, scope, 0)
        for n in node.subs:
            self.visit(n, scope, 0)

    def visitSlice(self, node, scope, assign=0):
        self.visit(node.expr, scope, 0)
        if node.lower:
            self.visit(node.lower, scope, 0)
        if node.upper:
            self.visit(node.upper, scope, 0)

    def visitAugAssign(self, node, scope):
        # If the LHS is a name, then this counts as assignment.
        # Otherwise, it's just use.
        self.visit(node.node, scope)
        if isinstance(node.node, ast.Name):
            self.visit(node.node, scope, 1) # XXX worry about this
        self.visit(node.expr, scope)

    # prune if statements if tests are false

    _const_types = types.StringType, types.IntType, types.FloatType

    def visitIf(self, node, scope):
        for test, body in node.tests:
            if isinstance(test, ast.Const):
                if type(test.value) in self._const_types:
                    if not test.value:
                        continue
            self.visit(test, scope)
            self.visit(body, scope)
        if node.else_:
            self.visit(node.else_, scope)

    # a yield statement signals a generator

    def visitYield(self, node, scope):
        scope.generator = 1
        self.visit(node.value, scope)

def list_eq(l1, l2):
    return sorted(l1) == sorted(l2)

if __name__ == "__main__":
    import sys
    from compiler import parseFile, walk
    import symtable

    def get_names(syms):
        return [s for s in [s.get_name() for s in syms.get_symbols()]
                if not (s.startswith('_[') or s.startswith('.'))]

    for file in sys.argv[1:]:
        print file
        f = open(file)
        buf = f.read()
        f.close()
        syms = symtable.symtable(buf, file, "exec")
        mod_names = get_names(syms)
        tree = parseFile(file)
        s = SymbolVisitor()
        walk(tree, s)

        # compare module-level symbols
        names2 = s.scopes[tree].get_names()

        if not list_eq(mod_names, names2):
            print
            print "oops", file
            print sorted(mod_names)
            print sorted(names2)
            sys.exit(-1)

        d = {}
        d.update(s.scopes)
        del d[tree]
        scopes = d.values()
        del d

        for s in syms.get_symbols():
            if s.is_namespace():
                l = [sc for sc in scopes
                     if sc.name == s.get_name()]
                if len(l) > 1:
                    print "skipping", s.get_name()
                else:
                    if not list_eq(get_names(s.get_namespace()),
                                   l[0].get_names()):
                        print s.get_name()
                        print sorted(get_names(s.get_namespace()))
                        print sorted(l[0].get_names())
                        sys.exit(-1)
@ -1,46 +0,0 @@
"""Check for errors in the AST.

The Python parser does not catch all syntax errors.  Others, like
assignments with invalid targets, are caught in the code generation
phase.

The compiler package catches some errors in the transformer module.
But it seems clearer to write checkers that use the AST to detect
errors.
"""

from compiler import ast, walk

def check(tree, multi=None):
    v = SyntaxErrorChecker(multi)
    walk(tree, v)
    return v.errors

class SyntaxErrorChecker:
    """A visitor to find syntax errors in the AST."""

    def __init__(self, multi=None):
        """Create new visitor object.

        If optional argument multi is not None, then print messages
        for each error rather than raising a SyntaxError for the
        first.
        """
        self.multi = multi
        self.errors = 0

    def error(self, node, msg):
        self.errors = self.errors + 1
        if self.multi is not None:
            print "%s:%s: %s" % (node.filename, node.lineno, msg)
        else:
            raise SyntaxError, "%s (%s:%s)" % (msg, node.filename, node.lineno)

    def visitAssign(self, node):
        # the transformer module handles many of these
        pass
##        for target in node.nodes:
##            if isinstance(target, ast.AssList):
##                if target.lineno is None:
##                    target.lineno = node.lineno
##                self.error(target, "can't assign to list comprehension")
File diff suppressed because it is too large

@@ -1,113 +0,0 @@
from compiler import ast

# XXX should probably rename ASTVisitor to ASTWalker
# XXX can it be made even more generic?

class ASTVisitor:
    """Performs a depth-first walk of the AST

    The ASTVisitor will walk the AST, performing either a preorder or
    postorder traversal depending on which method is called.

    methods:
    preorder(tree, visitor)
    postorder(tree, visitor)
        tree: an instance of ast.Node
        visitor: an instance with visitXXX methods

    The ASTVisitor is responsible for walking over the tree in the
    correct order.  For each node, it checks the visitor argument for
    a method named 'visitNodeType' where NodeType is the name of the
    node's class, e.g. Class.  If the method exists, it is called
    with the node as its sole argument.

    The visitor method for a particular node type can control how
    child nodes are visited during a preorder walk.  (It can't control
    the order during a postorder walk, because it is called _after_
    the walk has occurred.)  The ASTVisitor modifies the visitor
    argument by adding a visit method to the visitor; this method can
    be used to visit a child node of arbitrary type.
    """

    VERBOSE = 0

    def __init__(self):
        self.node = None
        self._cache = {}

    def default(self, node, *args):
        for child in node.getChildNodes():
            self.dispatch(child, *args)

    def dispatch(self, node, *args):
        self.node = node
        klass = node.__class__
        meth = self._cache.get(klass, None)
        if meth is None:
            className = klass.__name__
            meth = getattr(self.visitor, 'visit' + className, self.default)
            self._cache[klass] = meth
##        if self.VERBOSE > 0:
##            className = klass.__name__
##            if self.VERBOSE == 1:
##                if meth == 0:
##                    print "dispatch", className
##            else:
##                print "dispatch", className, (meth and meth.__name__ or '')
        return meth(node, *args)

    def preorder(self, tree, visitor, *args):
        """Do preorder walk of tree using visitor"""
        self.visitor = visitor
        visitor.visit = self.dispatch
        self.dispatch(tree, *args) # XXX *args make sense?

class ExampleASTVisitor(ASTVisitor):
    """Prints examples of the nodes that aren't visited

    This visitor-driver is only useful for development, when it's
    helpful to develop a visitor incrementally, and get feedback on what
    you still have to do.
    """
    examples = {}

    def dispatch(self, node, *args):
        self.node = node
        meth = self._cache.get(node.__class__, None)
        className = node.__class__.__name__
        if meth is None:
            meth = getattr(self.visitor, 'visit' + className, 0)
            self._cache[node.__class__] = meth
        if self.VERBOSE > 1:
            print "dispatch", className, (meth and meth.__name__ or '')
        if meth:
            meth(node, *args)
        elif self.VERBOSE > 0:
            klass = node.__class__
            if klass not in self.examples:
                self.examples[klass] = klass
                print
                print self.visitor
                print klass
                for attr in dir(node):
                    if attr[0] != '_':
                        print "\t", "%-12.12s" % attr, getattr(node, attr)
                print
            return self.default(node, *args)

# XXX this is an API change

_walker = ASTVisitor
def walk(tree, visitor, walker=None, verbose=None):
    if walker is None:
        walker = _walker()
    if verbose is not None:
        walker.VERBOSE = verbose
    walker.preorder(tree, visitor)
    return walker.visitor

def dumpNode(node):
    print node.__class__
    for attr in dir(node):
        if attr[0] != '_':
            print "\t", "%-10.10s" % attr, getattr(node, attr)
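
Migration note: the visit-by-class-name dispatch above is essentially what Python 3's ast.NodeVisitor provides, with generic_visit() playing the role of default(). A small sketch; the visitor class and source string are illustrative:

import ast

class NameCollector(ast.NodeVisitor):
    """Collect every Name node, mirroring the visitXXX dispatch above."""
    def __init__(self):
        self.names = []

    def visit_Name(self, node):
        self.names.append(node.id)
        self.generic_visit(node)  # keep walking, like self.default()

collector = NameCollector()
collector.visit(ast.parse("x = y + z"))
print(collector.names)  # ['x', 'y', 'z']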
@@ -1,88 +0,0 @@
/* Generated automatically from /Users/build/platform_darwin/src/Python-2.7.6/Modules/config.c.in by makesetup. */
/* -*- C -*- ***********************************************
Copyright (c) 2000, BeOpen.com.
Copyright (c) 1995-2000, Corporation for National Research Initiatives.
Copyright (c) 1990-1995, Stichting Mathematisch Centrum.
All rights reserved.

See the file "Misc/COPYRIGHT" for information on usage and
redistribution of this file, and for a DISCLAIMER OF ALL WARRANTIES.
******************************************************************/

/* Module configuration */

/* !!! !!! !!! This file is edited by the makesetup script !!! !!! !!! */

/* This file contains the table of built-in modules.
   See init_builtin() in import.c. */

#include "Python.h"

#ifdef __cplusplus
extern "C" {
#endif


extern void initthread(void);
extern void initsignal(void);
extern void initposix(void);
extern void initerrno(void);
extern void initpwd(void);
extern void init_sre(void);
extern void init_codecs(void);
extern void init_weakref(void);
extern void initzipimport(void);
extern void init_symtable(void);
extern void initxxsubtype(void);
/* -- ADDMODULE MARKER 1 -- */

extern void PyMarshal_Init(void);
extern void initimp(void);
extern void initgc(void);
extern void init_ast(void);
extern void _PyWarnings_Init(void);

struct _inittab _PyImport_Inittab[] = {

    {"thread", initthread},
    {"signal", initsignal},
    {"posix", initposix},
    {"errno", initerrno},
    {"pwd", initpwd},
    {"_sre", init_sre},
    {"_codecs", init_codecs},
    {"_weakref", init_weakref},
    {"zipimport", initzipimport},
    {"_symtable", init_symtable},
    {"xxsubtype", initxxsubtype},
/* -- ADDMODULE MARKER 2 -- */

    /* This module lives in marshal.c */
    {"marshal", PyMarshal_Init},

    /* This lives in import.c */
    {"imp", initimp},

    /* This lives in Python/Python-ast.c */
    {"_ast", init_ast},

    /* These entries are here for sys.builtin_module_names */
    {"__main__", NULL},
    {"__builtin__", NULL},
    {"sys", NULL},
    {"exceptions", NULL},

    /* This lives in gcmodule.c */
    {"gc", initgc},

    /* This lives in _warnings.c */
    {"_warnings", _PyWarnings_Init},

    /* Sentinel */
    {0, 0}
};


#ifdef __cplusplus
}
#endif

Binary file not shown.
Binary file not shown.
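
Migration note: this generated table is what backs sys.builtin_module_names at runtime; the Python 3 build generates the same structure, with PyInit_* entry points in place of the init* functions listed above. A quick runtime check, results depending on the build:

import sys

# Rows of _PyImport_Inittab surface here; on a typical CPython 3
# build both of these modules are compiled in.
print('_ast' in sys.builtin_module_names)      # True on typical builds
print('marshal' in sys.builtin_module_names)   # True on typical builds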

@@ -1,154 +0,0 @@
"""Utilities for with-statement contexts.  See PEP 343."""

import sys
from functools import wraps
from warnings import warn

__all__ = ["contextmanager", "nested", "closing"]

class GeneratorContextManager(object):
    """Helper for @contextmanager decorator."""

    def __init__(self, gen):
        self.gen = gen

    def __enter__(self):
        try:
            return self.gen.next()
        except StopIteration:
            raise RuntimeError("generator didn't yield")

    def __exit__(self, type, value, traceback):
        if type is None:
            try:
                self.gen.next()
            except StopIteration:
                return
            else:
                raise RuntimeError("generator didn't stop")
        else:
            if value is None:
                # Need to force instantiation so we can reliably
                # tell if we get the same exception back
                value = type()
            try:
                self.gen.throw(type, value, traceback)
                raise RuntimeError("generator didn't stop after throw()")
            except StopIteration, exc:
                # Suppress the exception *unless* it's the same exception that
                # was passed to throw().  This prevents a StopIteration
                # raised inside the "with" statement from being suppressed
                return exc is not value
            except:
                # only re-raise if it's *not* the exception that was
                # passed to throw(), because __exit__() must not raise
                # an exception unless __exit__() itself failed.  But throw()
                # has to raise the exception to signal propagation, so this
                # fixes the impedance mismatch between the throw() protocol
                # and the __exit__() protocol.
                #
                if sys.exc_info()[1] is not value:
                    raise


def contextmanager(func):
    """@contextmanager decorator.

    Typical usage:

        @contextmanager
        def some_generator(<arguments>):
            <setup>
            try:
                yield <value>
            finally:
                <cleanup>

    This makes this:

        with some_generator(<arguments>) as <variable>:
            <body>

    equivalent to this:

        <setup>
        try:
            <variable> = <value>
            <body>
        finally:
            <cleanup>

    """
    @wraps(func)
    def helper(*args, **kwds):
        return GeneratorContextManager(func(*args, **kwds))
    return helper


@contextmanager
def nested(*managers):
    """Combine multiple context managers into a single nested context manager.

    This function has been deprecated in favour of the multiple manager form
    of the with statement.

    The one advantage of this function over the multiple manager form of the
    with statement is that argument unpacking allows it to be
    used with a variable number of context managers as follows:

        with nested(*managers):
            do_something()

    """
    warn("With-statements now directly support multiple context managers",
         DeprecationWarning, 3)
    exits = []
    vars = []
    exc = (None, None, None)
    try:
        for mgr in managers:
            exit = mgr.__exit__
            enter = mgr.__enter__
            vars.append(enter())
            exits.append(exit)
        yield vars
    except:
        exc = sys.exc_info()
    finally:
        while exits:
            exit = exits.pop()
            try:
                if exit(*exc):
                    exc = (None, None, None)
            except:
                exc = sys.exc_info()
        if exc != (None, None, None):
            # Don't rely on sys.exc_info() still containing
            # the right information.  Another exception may
            # have been raised and caught by an exit method
            raise exc[0], exc[1], exc[2]


class closing(object):
    """Context to automatically close something at the end of a block.

    Code like this:

        with closing(<module>.open(<arguments>)) as f:
            <block>

    is equivalent to this:

        f = <module>.open(<arguments>)
        try:
            <block>
        finally:
            f.close()

    """
    def __init__(self, thing):
        self.thing = thing
    def __enter__(self):
        return self.thing
    def __exit__(self, *exc_info):
        self.thing.close()
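
Migration note: contextlib itself carries over to Python 3 (the gen.next() calls become gen.__next__() internally), and the try/finally equivalence the docstring describes looks like this in practice. A runnable py3 sketch; the tag name is illustrative:

from contextlib import contextmanager

@contextmanager
def tag(name):
    # <setup>
    print("<%s>" % name)
    try:
        yield name        # <value>, bound by "as"
    finally:
        # <cleanup> runs even if the body raises
        print("</%s>" % name)

with tag("h1") as t:
    print("heading, got %r" % t)
# prints: <h1> / heading, got 'h1' / </h1>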

@@ -1,12 +0,0 @@
######################################################################
# This file should be kept compatible with Python 2.3, see PEP 291. #
######################################################################
"""
Enough Mach-O to make your head spin.

See the relevant header files in /usr/include/mach-o

And also Apple's documentation.
"""

__version__ = '1.0'

@@ -1,62 +0,0 @@
import os
import sys
import unittest

# Bob Ippolito:
"""
Ok.. the code to find the filename for __getattr__ should look
something like:

    import os
    from macholib.dyld import dyld_find

    def find_lib(name):
        possible = ['lib'+name+'.dylib', name+'.dylib',
                    name+'.framework/'+name]
        for dylib in possible:
            try:
                return os.path.realpath(dyld_find(dylib))
            except ValueError:
                pass
        raise ValueError, "%s not found" % (name,)

It'll have output like this:

    >>> find_lib('pthread')
    '/usr/lib/libSystem.B.dylib'
    >>> find_lib('z')
    '/usr/lib/libz.1.dylib'
    >>> find_lib('IOKit')
    '/System/Library/Frameworks/IOKit.framework/Versions/A/IOKit'

-bob

"""

from ctypes.macholib.dyld import dyld_find

def find_lib(name):
    possible = ['lib'+name+'.dylib', name+'.dylib', name+'.framework/'+name]
    for dylib in possible:
        try:
            return os.path.realpath(dyld_find(dylib))
        except ValueError:
            pass
    raise ValueError("%s not found" % (name,))

class MachOTest(unittest.TestCase):
    if sys.platform == "darwin":
        def test_find(self):

            self.assertEqual(find_lib('pthread'),
                             '/usr/lib/libSystem.B.dylib')

            result = find_lib('z')
            self.assertTrue(result.startswith('/usr/lib/libz.1'))
            self.assertTrue(result.endswith('.dylib'))

            self.assertEqual(find_lib('IOKit'),
                             '/System/Library/Frameworks/IOKit.framework/Versions/A/IOKit')

if __name__ == "__main__":
    unittest.main()
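
Migration note: for the common case this helper covers, the stdlib's ctypes.util.find_library performs a similar dyld search on macOS in Python 3. A small sketch; exact return values vary by OS release:

from ctypes.util import find_library

print(find_library("z"))        # e.g. '/usr/lib/libz.dylib'
print(find_library("IOKit"))    # e.g. an IOKit.framework path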

@@ -1,129 +0,0 @@
# coding: latin-1
import unittest
import ctypes

try:
    ctypes.c_wchar
except AttributeError:
    pass
else:
    import _ctypes_test
    dll = ctypes.CDLL(_ctypes_test.__file__)
    wcslen = dll.my_wcslen
    wcslen.argtypes = [ctypes.c_wchar_p]


    class UnicodeTestCase(unittest.TestCase):
        def setUp(self):
            self.prev_conv_mode = ctypes.set_conversion_mode("ascii", "strict")

        def tearDown(self):
            ctypes.set_conversion_mode(*self.prev_conv_mode)

        def test_ascii_strict(self):
            ctypes.set_conversion_mode("ascii", "strict")
            # no conversions take place with unicode arguments
            self.assertEqual(wcslen(u"abc"), 3)
            self.assertEqual(wcslen(u"ab\u2070"), 3)
            # string args are converted
            self.assertEqual(wcslen("abc"), 3)
            self.assertRaises(ctypes.ArgumentError, wcslen, "abä")

        def test_ascii_replace(self):
            ctypes.set_conversion_mode("ascii", "replace")
            self.assertEqual(wcslen(u"abc"), 3)
            self.assertEqual(wcslen(u"ab\u2070"), 3)
            self.assertEqual(wcslen("abc"), 3)
            self.assertEqual(wcslen("abä"), 3)

        def test_ascii_ignore(self):
            ctypes.set_conversion_mode("ascii", "ignore")
            self.assertEqual(wcslen(u"abc"), 3)
            self.assertEqual(wcslen(u"ab\u2070"), 3)
            # ignore error mode skips non-ascii characters
            self.assertEqual(wcslen("abc"), 3)
            self.assertEqual(wcslen("äöüß"), 0)

        def test_latin1_strict(self):
            ctypes.set_conversion_mode("latin-1", "strict")
            self.assertEqual(wcslen(u"abc"), 3)
            self.assertEqual(wcslen(u"ab\u2070"), 3)
            self.assertEqual(wcslen("abc"), 3)
            self.assertEqual(wcslen("äöüß"), 4)

        def test_buffers(self):
            ctypes.set_conversion_mode("ascii", "strict")
            buf = ctypes.create_unicode_buffer("abc")
            self.assertEqual(len(buf), 3+1)

            ctypes.set_conversion_mode("ascii", "replace")
            buf = ctypes.create_unicode_buffer("abäöü")
            self.assertEqual(buf[:], u"ab\uFFFD\uFFFD\uFFFD\0")
            self.assertEqual(buf[::], u"ab\uFFFD\uFFFD\uFFFD\0")
            self.assertEqual(buf[::-1], u"\0\uFFFD\uFFFD\uFFFDba")
            self.assertEqual(buf[::2], u"a\uFFFD\uFFFD")
            self.assertEqual(buf[6:5:-1], u"")

            ctypes.set_conversion_mode("ascii", "ignore")
            buf = ctypes.create_unicode_buffer("abäöü")
            # is that correct? not sure.  But with 'ignore', you get what you pay for..
            self.assertEqual(buf[:], u"ab\0\0\0\0")
            self.assertEqual(buf[::], u"ab\0\0\0\0")
            self.assertEqual(buf[::-1], u"\0\0\0\0ba")
            self.assertEqual(buf[::2], u"a\0\0")
            self.assertEqual(buf[6:5:-1], u"")

    import _ctypes_test
    func = ctypes.CDLL(_ctypes_test.__file__)._testfunc_p_p

    class StringTestCase(UnicodeTestCase):
        def setUp(self):
            self.prev_conv_mode = ctypes.set_conversion_mode("ascii", "strict")
            func.argtypes = [ctypes.c_char_p]
            func.restype = ctypes.c_char_p

        def tearDown(self):
            ctypes.set_conversion_mode(*self.prev_conv_mode)
            func.argtypes = None
            func.restype = ctypes.c_int

        def test_ascii_replace(self):
            ctypes.set_conversion_mode("ascii", "strict")
            self.assertEqual(func("abc"), "abc")
            self.assertEqual(func(u"abc"), "abc")
            self.assertRaises(ctypes.ArgumentError, func, u"abä")

        def test_ascii_ignore(self):
            ctypes.set_conversion_mode("ascii", "ignore")
            self.assertEqual(func("abc"), "abc")
            self.assertEqual(func(u"abc"), "abc")
            self.assertEqual(func(u"äöüß"), "")

        def test_ascii_replace(self):
            ctypes.set_conversion_mode("ascii", "replace")
            self.assertEqual(func("abc"), "abc")
            self.assertEqual(func(u"abc"), "abc")
            self.assertEqual(func(u"äöüß"), "????")

        def test_buffers(self):
            ctypes.set_conversion_mode("ascii", "strict")
            buf = ctypes.create_string_buffer(u"abc")
            self.assertEqual(len(buf), 3+1)

            ctypes.set_conversion_mode("ascii", "replace")
            buf = ctypes.create_string_buffer(u"abäöü")
            self.assertEqual(buf[:], "ab???\0")
            self.assertEqual(buf[::], "ab???\0")
            self.assertEqual(buf[::-1], "\0???ba")
            self.assertEqual(buf[::2], "a??")
            self.assertEqual(buf[6:5:-1], "")

            ctypes.set_conversion_mode("ascii", "ignore")
            buf = ctypes.create_string_buffer(u"abäöü")
            # is that correct? not sure.  But with 'ignore', you get what you pay for..
            self.assertEqual(buf[:], "ab\0\0\0\0")
            self.assertEqual(buf[::], "ab\0\0\0\0")
            self.assertEqual(buf[::-1], "\0\0\0\0ba")

if __name__ == '__main__':
    unittest.main()

@@ -1,185 +0,0 @@
######################################################################
# This file should be kept compatible with Python 2.3, see PEP 291. #
######################################################################

# The most useful windows datatypes
from ctypes import *

BYTE = c_byte
WORD = c_ushort
DWORD = c_ulong

WCHAR = c_wchar
UINT = c_uint
INT = c_int

DOUBLE = c_double
FLOAT = c_float

BOOLEAN = BYTE
BOOL = c_long

from ctypes import _SimpleCData
class VARIANT_BOOL(_SimpleCData):
    _type_ = "v"
    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.value)

ULONG = c_ulong
LONG = c_long

USHORT = c_ushort
SHORT = c_short

# in the windows header files, these are structures.
_LARGE_INTEGER = LARGE_INTEGER = c_longlong
_ULARGE_INTEGER = ULARGE_INTEGER = c_ulonglong

LPCOLESTR = LPOLESTR = OLESTR = c_wchar_p
LPCWSTR = LPWSTR = c_wchar_p
LPCSTR = LPSTR = c_char_p
LPCVOID = LPVOID = c_void_p

# WPARAM is defined as UINT_PTR (unsigned type)
# LPARAM is defined as LONG_PTR (signed type)
if sizeof(c_long) == sizeof(c_void_p):
    WPARAM = c_ulong
    LPARAM = c_long
elif sizeof(c_longlong) == sizeof(c_void_p):
    WPARAM = c_ulonglong
    LPARAM = c_longlong

ATOM = WORD
LANGID = WORD

COLORREF = DWORD
LGRPID = DWORD
LCTYPE = DWORD

LCID = DWORD

################################################################
# HANDLE types
HANDLE = c_void_p # in the header files: void *

HACCEL = HANDLE
HBITMAP = HANDLE
HBRUSH = HANDLE
HCOLORSPACE = HANDLE
HDC = HANDLE
HDESK = HANDLE
HDWP = HANDLE
HENHMETAFILE = HANDLE
HFONT = HANDLE
HGDIOBJ = HANDLE
HGLOBAL = HANDLE
HHOOK = HANDLE
HICON = HANDLE
HINSTANCE = HANDLE
HKEY = HANDLE
HKL = HANDLE
HLOCAL = HANDLE
HMENU = HANDLE
HMETAFILE = HANDLE
HMODULE = HANDLE
HMONITOR = HANDLE
HPALETTE = HANDLE
HPEN = HANDLE
HRGN = HANDLE
HRSRC = HANDLE
HSTR = HANDLE
HTASK = HANDLE
HWINSTA = HANDLE
HWND = HANDLE
SC_HANDLE = HANDLE
SERVICE_STATUS_HANDLE = HANDLE

################################################################
# Some important structure definitions

class RECT(Structure):
    _fields_ = [("left", c_long),
                ("top", c_long),
                ("right", c_long),
                ("bottom", c_long)]
tagRECT = _RECTL = RECTL = RECT

class _SMALL_RECT(Structure):
    _fields_ = [('Left', c_short),
                ('Top', c_short),
                ('Right', c_short),
                ('Bottom', c_short)]
SMALL_RECT = _SMALL_RECT

class _COORD(Structure):
    _fields_ = [('X', c_short),
                ('Y', c_short)]

class POINT(Structure):
    _fields_ = [("x", c_long),
                ("y", c_long)]
tagPOINT = _POINTL = POINTL = POINT

class SIZE(Structure):
    _fields_ = [("cx", c_long),
                ("cy", c_long)]
tagSIZE = SIZEL = SIZE

def RGB(red, green, blue):
    return red + (green << 8) + (blue << 16)

class FILETIME(Structure):
    _fields_ = [("dwLowDateTime", DWORD),
                ("dwHighDateTime", DWORD)]
_FILETIME = FILETIME

class MSG(Structure):
    _fields_ = [("hWnd", HWND),
                ("message", c_uint),
                ("wParam", WPARAM),
                ("lParam", LPARAM),
                ("time", DWORD),
                ("pt", POINT)]
tagMSG = MSG
MAX_PATH = 260

class WIN32_FIND_DATAA(Structure):
    _fields_ = [("dwFileAttributes", DWORD),
                ("ftCreationTime", FILETIME),
                ("ftLastAccessTime", FILETIME),
                ("ftLastWriteTime", FILETIME),
                ("nFileSizeHigh", DWORD),
                ("nFileSizeLow", DWORD),
                ("dwReserved0", DWORD),
                ("dwReserved1", DWORD),
                ("cFileName", c_char * MAX_PATH),
                ("cAlternateFileName", c_char * 14)]

class WIN32_FIND_DATAW(Structure):
    _fields_ = [("dwFileAttributes", DWORD),
                ("ftCreationTime", FILETIME),
                ("ftLastAccessTime", FILETIME),
                ("ftLastWriteTime", FILETIME),
                ("nFileSizeHigh", DWORD),
                ("nFileSizeLow", DWORD),
                ("dwReserved0", DWORD),
                ("dwReserved1", DWORD),
                ("cFileName", c_wchar * MAX_PATH),
                ("cAlternateFileName", c_wchar * 14)]

__all__ = ['ATOM', 'BOOL', 'BOOLEAN', 'BYTE', 'COLORREF', 'DOUBLE', 'DWORD',
           'FILETIME', 'FLOAT', 'HACCEL', 'HANDLE', 'HBITMAP', 'HBRUSH',
           'HCOLORSPACE', 'HDC', 'HDESK', 'HDWP', 'HENHMETAFILE', 'HFONT',
           'HGDIOBJ', 'HGLOBAL', 'HHOOK', 'HICON', 'HINSTANCE', 'HKEY',
           'HKL', 'HLOCAL', 'HMENU', 'HMETAFILE', 'HMODULE', 'HMONITOR',
           'HPALETTE', 'HPEN', 'HRGN', 'HRSRC', 'HSTR', 'HTASK', 'HWINSTA',
           'HWND', 'INT', 'LANGID', 'LARGE_INTEGER', 'LCID', 'LCTYPE',
           'LGRPID', 'LONG', 'LPARAM', 'LPCOLESTR', 'LPCSTR', 'LPCVOID',
           'LPCWSTR', 'LPOLESTR', 'LPSTR', 'LPVOID', 'LPWSTR', 'MAX_PATH',
           'MSG', 'OLESTR', 'POINT', 'POINTL', 'RECT', 'RECTL', 'RGB',
           'SC_HANDLE', 'SERVICE_STATUS_HANDLE', 'SHORT', 'SIZE', 'SIZEL',
           'SMALL_RECT', 'UINT', 'ULARGE_INTEGER', 'ULONG', 'USHORT',
           'VARIANT_BOOL', 'WCHAR', 'WIN32_FIND_DATAA', 'WIN32_FIND_DATAW',
           'WORD', 'WPARAM', '_COORD', '_FILETIME', '_LARGE_INTEGER',
           '_POINTL', '_RECTL', '_SMALL_RECT', '_ULARGE_INTEGER', 'tagMSG',
           'tagPOINT', 'tagRECT', 'tagSIZE']
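
Note on RGB() above: it packs the channels into a COLORREF laid out as 0x00BBGGRR, with red in the low byte. A quick check of the packing, using a standalone copy of the function for illustration:

def RGB(red, green, blue):
    return red + (green << 8) + (blue << 16)

assert RGB(0xFF, 0x00, 0x00) == 0x0000FF   # pure red lands in the low byte
assert RGB(0x11, 0x22, 0x33) == 0x332211   # 0x00BBGGRR layout
print(hex(RGB(0x11, 0x22, 0x33)))          # 0x332211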

@@ -1,50 +0,0 @@
"""curses.wrapper

Contains one function, wrapper(), which runs another function which
should be the rest of your curses-based application.  If the
application raises an exception, wrapper() will restore the terminal
to a sane state so you can read the resulting traceback.

"""

import curses

def wrapper(func, *args, **kwds):
    """Wrapper function that initializes curses and calls another function,
    restoring normal keyboard/screen behavior on error.
    The callable object 'func' is then passed the main window 'stdscr'
    as its first argument, followed by any other arguments passed to
    wrapper().
    """

    try:
        # Initialize curses
        stdscr = curses.initscr()

        # Turn off echoing of keys, and enter cbreak mode,
        # where no buffering is performed on keyboard input
        curses.noecho()
        curses.cbreak()

        # In keypad mode, escape sequences for special keys
        # (like the cursor keys) will be interpreted and
        # a special value like curses.KEY_LEFT will be returned
        stdscr.keypad(1)

        # Start color, too.  Harmless if the terminal doesn't have
        # color; user can test with has_color() later on.  The try/catch
        # works around a minor bit of over-conscientiousness in the curses
        # module -- the error return from C start_color() is ignorable.
        try:
            curses.start_color()
        except:
            pass

        return func(stdscr, *args, **kwds)
    finally:
        # Set everything back to normal
        if 'stdscr' in locals():
            stdscr.keypad(0)
            curses.echo()
            curses.nocbreak()
            curses.endwin()

@@ -1,18 +0,0 @@
"""Provide a (g)dbm-compatible interface to bsddb.hashopen."""

import sys
import warnings
warnings.warnpy3k("in 3.x, the dbhash module has been removed", stacklevel=2)
try:
    import bsddb
except ImportError:
    # prevent a second import of this module from spuriously succeeding
    del sys.modules[__name__]
    raise

__all__ = ["error","open"]

error = bsddb.error                     # Exported for anydbm

def open(file, flag = 'r', mode=0666):
    return bsddb.hashopen(file, flag, mode)

@@ -1,41 +0,0 @@
"""Read and cache directory listings.

The listdir() routine returns a sorted list of the files in a directory,
using a cache to avoid reading the directory more often than necessary.
The annotate() routine appends slashes to directories."""
from warnings import warnpy3k
warnpy3k("the dircache module has been removed in Python 3.0", stacklevel=2)
del warnpy3k

import os

__all__ = ["listdir", "opendir", "annotate", "reset"]

cache = {}

def reset():
    """Reset the cache completely."""
    global cache
    cache = {}

def listdir(path):
    """List directory contents, using cache."""
    try:
        cached_mtime, list = cache[path]
        del cache[path]
    except KeyError:
        cached_mtime, list = -1, []
    mtime = os.stat(path).st_mtime
    if mtime != cached_mtime:
        list = os.listdir(path)
        list.sort()
    cache[path] = mtime, list
    return list

opendir = listdir # XXX backward compatibility

def annotate(head, list):
    """Add '/' suffixes to directories."""
    for i in range(len(list)):
        if os.path.isdir(os.path.join(head, list[i])):
            list[i] = list[i] + '/'

@@ -1,224 +0,0 @@
"""Disassembler of Python byte code into mnemonics."""

import sys
import types

from opcode import *
from opcode import __all__ as _opcodes_all

__all__ = ["dis", "disassemble", "distb", "disco",
           "findlinestarts", "findlabels"] + _opcodes_all
del _opcodes_all

_have_code = (types.MethodType, types.FunctionType, types.CodeType,
              types.ClassType, type)

def dis(x=None):
    """Disassemble classes, methods, functions, or code.

    With no argument, disassemble the last traceback.

    """
    if x is None:
        distb()
        return
    if isinstance(x, types.InstanceType):
        x = x.__class__
    if hasattr(x, 'im_func'):
        x = x.im_func
    if hasattr(x, 'func_code'):
        x = x.func_code
    if hasattr(x, '__dict__'):
        items = x.__dict__.items()
        items.sort()
        for name, x1 in items:
            if isinstance(x1, _have_code):
                print "Disassembly of %s:" % name
                try:
                    dis(x1)
                except TypeError, msg:
                    print "Sorry:", msg
                print
    elif hasattr(x, 'co_code'):
        disassemble(x)
    elif isinstance(x, str):
        disassemble_string(x)
    else:
        raise TypeError, \
              "don't know how to disassemble %s objects" % \
              type(x).__name__

def distb(tb=None):
    """Disassemble a traceback (default: last traceback)."""
    if tb is None:
        try:
            tb = sys.last_traceback
        except AttributeError:
            raise RuntimeError, "no last traceback to disassemble"
        while tb.tb_next: tb = tb.tb_next
    disassemble(tb.tb_frame.f_code, tb.tb_lasti)

def disassemble(co, lasti=-1):
    """Disassemble a code object."""
    code = co.co_code
    labels = findlabels(code)
    linestarts = dict(findlinestarts(co))
    n = len(code)
    i = 0
    extended_arg = 0
    free = None
    while i < n:
        c = code[i]
        op = ord(c)
        if i in linestarts:
            if i > 0:
                print
            print "%3d" % linestarts[i],
        else:
            print '   ',

        if i == lasti: print '-->',
        else: print '   ',
        if i in labels: print '>>',
        else: print '  ',
        print repr(i).rjust(4),
        print opname[op].ljust(20),
        i = i+1
        if op >= HAVE_ARGUMENT:
            oparg = ord(code[i]) + ord(code[i+1])*256 + extended_arg
            extended_arg = 0
            i = i+2
            if op == EXTENDED_ARG:
                extended_arg = oparg*65536L
            print repr(oparg).rjust(5),
            if op in hasconst:
                print '(' + repr(co.co_consts[oparg]) + ')',
            elif op in hasname:
                print '(' + co.co_names[oparg] + ')',
            elif op in hasjrel:
                print '(to ' + repr(i + oparg) + ')',
            elif op in haslocal:
                print '(' + co.co_varnames[oparg] + ')',
            elif op in hascompare:
                print '(' + cmp_op[oparg] + ')',
            elif op in hasfree:
                if free is None:
                    free = co.co_cellvars + co.co_freevars
                print '(' + free[oparg] + ')',
        print

def disassemble_string(code, lasti=-1, varnames=None, names=None,
                       constants=None):
    labels = findlabels(code)
    n = len(code)
    i = 0
    while i < n:
        c = code[i]
        op = ord(c)
        if i == lasti: print '-->',
        else: print '   ',
        if i in labels: print '>>',
        else: print '  ',
        print repr(i).rjust(4),
        print opname[op].ljust(15),
        i = i+1
        if op >= HAVE_ARGUMENT:
            oparg = ord(code[i]) + ord(code[i+1])*256
            i = i+2
            print repr(oparg).rjust(5),
            if op in hasconst:
                if constants:
                    print '(' + repr(constants[oparg]) + ')',
                else:
                    print '(%d)'%oparg,
            elif op in hasname:
                if names is not None:
                    print '(' + names[oparg] + ')',
                else:
                    print '(%d)'%oparg,
            elif op in hasjrel:
                print '(to ' + repr(i + oparg) + ')',
            elif op in haslocal:
                if varnames:
                    print '(' + varnames[oparg] + ')',
                else:
                    print '(%d)' % oparg,
            elif op in hascompare:
                print '(' + cmp_op[oparg] + ')',
        print

disco = disassemble                     # XXX For backwards compatibility

def findlabels(code):
    """Detect all offsets in a byte code which are jump targets.

    Return the list of offsets.

    """
    labels = []
    n = len(code)
    i = 0
    while i < n:
        c = code[i]
        op = ord(c)
        i = i+1
        if op >= HAVE_ARGUMENT:
            oparg = ord(code[i]) + ord(code[i+1])*256
            i = i+2
            label = -1
            if op in hasjrel:
                label = i+oparg
            elif op in hasjabs:
                label = oparg
            if label >= 0:
                if label not in labels:
                    labels.append(label)
    return labels

def findlinestarts(code):
    """Find the offsets in a byte code which are start of lines in the source.

    Generate pairs (offset, lineno) as described in Python/compile.c.

    """
    byte_increments = [ord(c) for c in code.co_lnotab[0::2]]
    line_increments = [ord(c) for c in code.co_lnotab[1::2]]

    lastlineno = None
    lineno = code.co_firstlineno
    addr = 0
    for byte_incr, line_incr in zip(byte_increments, line_increments):
        if byte_incr:
            if lineno != lastlineno:
                yield (addr, lineno)
                lastlineno = lineno
            addr += byte_incr
        lineno += line_incr
    if lineno != lastlineno:
        yield (addr, lineno)

def _test():
    """Simple test program to disassemble a file."""
    if sys.argv[1:]:
        if sys.argv[2:]:
            sys.stderr.write("usage: python dis.py [-|file]\n")
            sys.exit(2)
        fn = sys.argv[1]
        if not fn or fn == "-":
            fn = None
    else:
        fn = None
    if fn is None:
        f = sys.stdin
    else:
        f = open(fn)
    source = f.read()
    if fn is not None:
        f.close()
    else:
        fn = "<stdin>"
    code = compile(source, fn, "exec")
    dis(code)

if __name__ == "__main__":
    _test()
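
Migration note: dis carries over to Python 3, including findlinestarts(); the generator above decodes co_lnotab into the same (offset, lineno) pairs. A quick py3 check; exact offsets vary across versions:

import dis

code = compile("x = 1\ny = 2\n", "<example>", "exec")
# (offset, lineno) pairs from the code object's line-number table,
# exactly what the findlinestarts() above computes by hand.
print(list(dis.findlinestarts(code)))   # e.g. [(0, 1), (4, 2)]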

@@ -1,319 +0,0 @@
"""distutils.emxccompiler

Provides the EMXCCompiler class, a subclass of UnixCCompiler that
handles the EMX port of the GNU C compiler to OS/2.
"""

# issues:
#
# * OS/2 insists that DLLs can have names no longer than 8 characters
#   We put export_symbols in a def-file, as though the DLL can have
#   an arbitrary length name, but truncate the output filename.
#
# * only use OMF objects and use LINK386 as the linker (-Zomf)
#
# * always build for multithreading (-Zmt) as the accompanying OS/2 port
#   of Python is only distributed with threads enabled.
#
# tested configurations:
#
# * EMX gcc 2.81/EMX 0.9d fix03

__revision__ = "$Id$"

import os,sys,copy
from distutils.ccompiler import gen_preprocess_options, gen_lib_options
from distutils.unixccompiler import UnixCCompiler
from distutils.file_util import write_file
from distutils.errors import DistutilsExecError, CompileError, UnknownFileError
from distutils import log

class EMXCCompiler (UnixCCompiler):

    compiler_type = 'emx'
    obj_extension = ".obj"
    static_lib_extension = ".lib"
    shared_lib_extension = ".dll"
    static_lib_format = "%s%s"
    shared_lib_format = "%s%s"
    res_extension = ".res"      # compiled resource file
    exe_extension = ".exe"

    def __init__ (self,
                  verbose=0,
                  dry_run=0,
                  force=0):

        UnixCCompiler.__init__ (self, verbose, dry_run, force)

        (status, details) = check_config_h()
        self.debug_print("Python's GCC status: %s (details: %s)" %
                         (status, details))
        if status is not CONFIG_H_OK:
            self.warn(
                "Python's pyconfig.h doesn't seem to support your compiler.  " +
                ("Reason: %s." % details) +
                "Compiling may fail because of undefined preprocessor macros.")

        (self.gcc_version, self.ld_version) = \
            get_versions()
        self.debug_print(self.compiler_type + ": gcc %s, ld %s\n" %
                         (self.gcc_version,
                          self.ld_version) )

        # Hard-code GCC because that's what this is all about.
        # XXX optimization, warnings etc. should be customizable.
        self.set_executables(compiler='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall',
                             compiler_so='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall',
                             linker_exe='gcc -Zomf -Zmt -Zcrtdll',
                             linker_so='gcc -Zomf -Zmt -Zcrtdll -Zdll')

        # want the gcc library statically linked (so that we don't have
        # to distribute a version dependent on the compiler we have)
        self.dll_libraries=["gcc"]

    # __init__ ()

    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
        if ext == '.rc':
            # gcc requires '.rc' compiled to binary ('.res') files !!!
            try:
                self.spawn(["rc", "-r", src])
            except DistutilsExecError, msg:
                raise CompileError, msg
        else: # for other files use the C-compiler
            try:
                self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
                           extra_postargs)
            except DistutilsExecError, msg:
                raise CompileError, msg

    def link (self,
              target_desc,
              objects,
              output_filename,
              output_dir=None,
              libraries=None,
              library_dirs=None,
              runtime_library_dirs=None,
              export_symbols=None,
              debug=0,
              extra_preargs=None,
              extra_postargs=None,
              build_temp=None,
              target_lang=None):

        # use separate copies, so we can modify the lists
        extra_preargs = copy.copy(extra_preargs or [])
        libraries = copy.copy(libraries or [])
        objects = copy.copy(objects or [])

        # Additional libraries
        libraries.extend(self.dll_libraries)

        # handle export symbols by creating a def-file
        # with executables this only works with gcc/ld as linker
        if ((export_symbols is not None) and
            (target_desc != self.EXECUTABLE)):
            # (The linker doesn't do anything if output is up-to-date.
            # So it would probably better to check if we really need this,
            # but for this we had to insert some unchanged parts of
            # UnixCCompiler, and this is not what we want.)

            # we want to put some files in the same directory as the
            # object files are, build_temp doesn't help much
            # where are the object files
            temp_dir = os.path.dirname(objects[0])
            # name of dll to give the helper files the same base name
            (dll_name, dll_extension) = os.path.splitext(
                os.path.basename(output_filename))

            # generate the filenames for these files
            def_file = os.path.join(temp_dir, dll_name + ".def")

            # Generate .def file
            contents = [
                "LIBRARY %s INITINSTANCE TERMINSTANCE" % \
                os.path.splitext(os.path.basename(output_filename))[0],
                "DATA MULTIPLE NONSHARED",
                "EXPORTS"]
            for sym in export_symbols:
                contents.append('  "%s"' % sym)
            self.execute(write_file, (def_file, contents),
                         "writing %s" % def_file)

            # next add options for def-file and to creating import libraries
            # for gcc/ld the def-file is specified as any other object files
            objects.append(def_file)

        #end: if ((export_symbols is not None) and
        #        (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):

        # who wants symbols and a many times larger output file
        # should explicitly switch the debug mode on
        # otherwise we let dllwrap/ld strip the output file
        # (On my machine: 10KB < stripped_file < ??100KB
        #   unstripped_file = stripped_file + XXX KB
        #  ( XXX=254 for a typical python extension))
        if not debug:
            extra_preargs.append("-s")

        UnixCCompiler.link(self,
                           target_desc,
                           objects,
                           output_filename,
                           output_dir,
                           libraries,
                           library_dirs,
                           runtime_library_dirs,
                           None, # export_symbols, we do this in our def-file
                           debug,
                           extra_preargs,
                           extra_postargs,
                           build_temp,
                           target_lang)

    # link ()

    # -- Miscellaneous methods -----------------------------------------

    # override the object_filenames method from CCompiler to
    # support rc and res-files
    def object_filenames (self,
                          source_filenames,
                          strip_dir=0,
                          output_dir=''):
        if output_dir is None: output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
            (base, ext) = os.path.splitext (os.path.normcase(src_name))
            if ext not in (self.src_extensions + ['.rc']):
                raise UnknownFileError, \
                      "unknown file type '%s' (from '%s')" % \
                      (ext, src_name)
            if strip_dir:
                base = os.path.basename (base)
            if ext == '.rc':
                # these need to be compiled to object files
                obj_names.append (os.path.join (output_dir,
                                                base + self.res_extension))
            else:
                obj_names.append (os.path.join (output_dir,
                                                base + self.obj_extension))
        return obj_names

    # object_filenames ()

    # override the find_library_file method from UnixCCompiler
    # to deal with file naming/searching differences
    def find_library_file(self, dirs, lib, debug=0):
        shortlib = '%s.lib' % lib
        longlib = 'lib%s.lib' % lib    # this form very rare

        # get EMX's default library directory search path
        try:
            emx_dirs = os.environ['LIBRARY_PATH'].split(';')
        except KeyError:
            emx_dirs = []

        for dir in dirs + emx_dirs:
            shortlibp = os.path.join(dir, shortlib)
            longlibp = os.path.join(dir, longlib)
            if os.path.exists(shortlibp):
                return shortlibp
            elif os.path.exists(longlibp):
                return longlibp

        # Oops, didn't find it in *any* of 'dirs'
        return None

# class EMXCCompiler


# Because these compilers aren't configured in Python's pyconfig.h file by
# default, we should at least warn the user if he is using a unmodified
# version.

CONFIG_H_OK = "ok"
CONFIG_H_NOTOK = "not ok"
CONFIG_H_UNCERTAIN = "uncertain"

def check_config_h():

    """Check if the current Python installation (specifically, pyconfig.h)
    appears amenable to building extensions with GCC.  Returns a tuple
    (status, details), where 'status' is one of the following constants:
      CONFIG_H_OK
        all is well, go ahead and compile
      CONFIG_H_NOTOK
        doesn't look good
      CONFIG_H_UNCERTAIN
        not sure -- unable to read pyconfig.h
    'details' is a human-readable string explaining the situation.

    Note there are two ways to conclude "OK": either 'sys.version' contains
    the string "GCC" (implying that this Python was built with GCC), or the
    installed "pyconfig.h" contains the string "__GNUC__".
    """

    # XXX since this function also checks sys.version, it's not strictly a
    # "pyconfig.h" check -- should probably be renamed...

    from distutils import sysconfig
    import string
    # if sys.version contains GCC then python was compiled with
    # GCC, and the pyconfig.h file should be OK
    if string.find(sys.version,"GCC") >= 0:
        return (CONFIG_H_OK, "sys.version mentions 'GCC'")

    fn = sysconfig.get_config_h_filename()
    try:
        # It would probably better to read single lines to search.
        # But we do this only once, and it is fast enough
        f = open(fn)
        try:
            s = f.read()
        finally:
            f.close()

    except IOError, exc:
        # if we can't read this file, we cannot say it is wrong
        # the compiler will complain later about this file as missing
        return (CONFIG_H_UNCERTAIN,
                "couldn't read '%s': %s" % (fn, exc.strerror))

    else:
        # "pyconfig.h" contains an "#ifdef __GNUC__" or something similar
        if string.find(s,"__GNUC__") >= 0:
            return (CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn)
        else:
            return (CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn)


def get_versions():
    """ Try to find out the versions of gcc and ld.
        If not possible it returns None for it.
    """
    from distutils.version import StrictVersion
    from distutils.spawn import find_executable
    import re

    gcc_exe = find_executable('gcc')
    if gcc_exe:
        out = os.popen(gcc_exe + ' -dumpversion','r')
        try:
            out_string = out.read()
        finally:
            out.close()
        result = re.search('(\d+\.\d+\.\d+)',out_string)
        if result:
            gcc_version = StrictVersion(result.group(1))
        else:
            gcc_version = None
    else:
        gcc_version = None
    # EMX ld has no way of reporting version number, and we use GCC
    # anyway - so we can link OMF DLLs
    ld_version = None
    return (gcc_version, ld_version)
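
Migration note: the os.popen-based probe in get_versions() maps onto subprocess.run in Python 3. A best-effort sketch of the same gcc version sniff; the helper name is ours, not the module's:

import re
import subprocess

def gcc_version():
    """Best-effort 'gcc -dumpversion' probe, mirroring get_versions()."""
    try:
        out = subprocess.run(["gcc", "-dumpversion"],
                             capture_output=True, text=True, check=True)
    except (OSError, subprocess.CalledProcessError):
        return None
    match = re.search(r"(\d+(?:\.\d+)*)", out.stdout)
    return match.group(1) if match else None

print(gcc_version())   # e.g. '12.2.0', or None when gcc is absent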

@@ -1,51 +0,0 @@
from distutils.core import Extension as _Extension
from distutils.core import Distribution as _Distribution

def _get_unpatched(cls):
    """Protect against re-patching the distutils if reloaded

    Also ensures that no other distutils extension monkeypatched the distutils
    first.
    """
    while cls.__module__.startswith('setuptools'):
        cls, = cls.__bases__
    if not cls.__module__.startswith('distutils'):
        raise AssertionError(
            "distutils has already been patched by %r" % cls
        )
    return cls

_Distribution = _get_unpatched(_Distribution)
_Extension = _get_unpatched(_Extension)

try:
    from Pyrex.Distutils.build_ext import build_ext
except ImportError:
    have_pyrex = False
else:
    have_pyrex = True


class Extension(_Extension):
    """Extension that uses '.c' files in place of '.pyx' files"""

    if not have_pyrex:
        # convert .pyx extensions to .c
        def __init__(self,*args,**kw):
            _Extension.__init__(self,*args,**kw)
            sources = []
            for s in self.sources:
                if s.endswith('.pyx'):
                    sources.append(s[:-3]+'c')
                else:
                    sources.append(s)
            self.sources = sources

class Library(Extension):
    """Just like a regular Extension, but built as a library instead"""

import sys, distutils.core, distutils.extension
distutils.core.Extension = Extension
distutils.extension.Extension = Extension
if 'distutils.command.build_ext' in sys.modules:
    sys.modules['distutils.command.build_ext'].Extension = Extension
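
Usage sketch for the patched Extension above: with Pyrex absent, '.pyx' sources are rewritten to their pre-generated '.c' twins at construction time. Package and file names here are illustrative:

ext = Extension("pkg.fast", ["pkg/fast.pyx", "pkg/helper.c"])
print(ext.sources)   # ['pkg/fast.c', 'pkg/helper.c']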

@@ -1,122 +0,0 @@
"""Tests for distutils.command.build_py."""

import os
import sys
import StringIO
import unittest

from distutils.command.build_py import build_py
from distutils.core import Distribution
from distutils.errors import DistutilsFileError

from distutils.tests import support
from test.test_support import run_unittest


class BuildPyTestCase(support.TempdirManager,
                      support.LoggingSilencer,
                      unittest.TestCase):

    def test_package_data(self):
        sources = self.mkdtemp()
        f = open(os.path.join(sources, "__init__.py"), "w")
        try:
            f.write("# Pretend this is a package.")
        finally:
            f.close()
        f = open(os.path.join(sources, "README.txt"), "w")
        try:
            f.write("Info about this package")
        finally:
            f.close()

        destination = self.mkdtemp()

        dist = Distribution({"packages": ["pkg"],
                             "package_dir": {"pkg": sources}})
        # script_name need not exist, it just need to be initialized
        dist.script_name = os.path.join(sources, "setup.py")
        dist.command_obj["build"] = support.DummyCommand(
            force=0,
            build_lib=destination)
        dist.packages = ["pkg"]
        dist.package_data = {"pkg": ["README.txt"]}
        dist.package_dir = {"pkg": sources}

        cmd = build_py(dist)
        cmd.compile = 1
        cmd.ensure_finalized()
        self.assertEqual(cmd.package_data, dist.package_data)

        cmd.run()

        # This makes sure the list of outputs includes byte-compiled
        # files for Python modules but not for package data files
        # (there shouldn't *be* byte-code files for those!).
        #
        self.assertEqual(len(cmd.get_outputs()), 3)
        pkgdest = os.path.join(destination, "pkg")
        files = os.listdir(pkgdest)
        self.assertIn("__init__.py", files)
        self.assertIn("README.txt", files)
        # XXX even with -O, distutils writes pyc, not pyo; bug?
        if sys.dont_write_bytecode:
            self.assertNotIn("__init__.pyc", files)
        else:
            self.assertIn("__init__.pyc", files)

    def test_empty_package_dir(self):
        # See SF 1668596/1720897.
        cwd = os.getcwd()

        # create the distribution files.
        sources = self.mkdtemp()
        open(os.path.join(sources, "__init__.py"), "w").close()

        testdir = os.path.join(sources, "doc")
        os.mkdir(testdir)
        open(os.path.join(testdir, "testfile"), "w").close()

        os.chdir(sources)
        old_stdout = sys.stdout
        sys.stdout = StringIO.StringIO()

        try:
            dist = Distribution({"packages": ["pkg"],
                                 "package_dir": {"pkg": ""},
                                 "package_data": {"pkg": ["doc/*"]}})
            # script_name need not exist, it just need to be initialized
            dist.script_name = os.path.join(sources, "setup.py")
            dist.script_args = ["build"]
            dist.parse_command_line()

            try:
                dist.run_commands()
            except DistutilsFileError:
                self.fail("failed package_data test when package_dir is ''")
        finally:
            # Restore state.
            os.chdir(cwd)
            sys.stdout = old_stdout

    def test_dont_write_bytecode(self):
        # makes sure byte_compile is not used
        pkg_dir, dist = self.create_dist()
        cmd = build_py(dist)
        cmd.compile = 1
        cmd.optimize = 1

        old_dont_write_bytecode = sys.dont_write_bytecode
        sys.dont_write_bytecode = True
        try:
            cmd.byte_compile([])
        finally:
            sys.dont_write_bytecode = old_dont_write_bytecode

        self.assertIn('byte-compiling is disabled', self.logs[0][1])

def test_suite():
    return unittest.makeSuite(BuildPyTestCase)

if __name__ == "__main__":
    run_unittest(test_suite())

@@ -1,82 +0,0 @@
"""Tests for distutils.ccompiler."""
import os
import unittest
from test.test_support import captured_stdout

from distutils.ccompiler import (gen_lib_options, CCompiler,
                                 get_default_compiler)
from distutils.sysconfig import customize_compiler
from distutils import debug
from distutils.tests import support

class FakeCompiler(object):
    def library_dir_option(self, dir):
        return "-L" + dir

    def runtime_library_dir_option(self, dir):
        return ["-cool", "-R" + dir]

    def find_library_file(self, dirs, lib, debug=0):
        return 'found'

    def library_option(self, lib):
        return "-l" + lib

class CCompilerTestCase(support.EnvironGuard, unittest.TestCase):

    def test_gen_lib_options(self):
        compiler = FakeCompiler()
        libdirs = ['lib1', 'lib2']
        runlibdirs = ['runlib1']
        libs = [os.path.join('dir', 'name'), 'name2']

        opts = gen_lib_options(compiler, libdirs, runlibdirs, libs)
        wanted = ['-Llib1', '-Llib2', '-cool', '-Rrunlib1', 'found',
                  '-lname2']
        self.assertEqual(opts, wanted)

    def test_debug_print(self):

        class MyCCompiler(CCompiler):
            executables = {}

        compiler = MyCCompiler()
        with captured_stdout() as stdout:
            compiler.debug_print('xxx')
        stdout.seek(0)
        self.assertEqual(stdout.read(), '')

        debug.DEBUG = True
        try:
            with captured_stdout() as stdout:
                compiler.debug_print('xxx')
            stdout.seek(0)
            self.assertEqual(stdout.read(), 'xxx\n')
        finally:
            debug.DEBUG = False

    def test_customize_compiler(self):

        # not testing if default compiler is not unix
        if get_default_compiler() != 'unix':
            return

        os.environ['AR'] = 'my_ar'
        os.environ['ARFLAGS'] = '-arflags'

        # make sure AR gets caught
        class compiler:
            compiler_type = 'unix'

            def set_executables(self, **kw):
                self.exes = kw

        comp = compiler()
        customize_compiler(comp)
        self.assertEqual(comp.exes['archiver'], 'my_ar -arflags')

def test_suite():
    return unittest.makeSuite(CCompilerTestCase)

if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")

@@ -1,111 +0,0 @@
"""Tests for distutils.sysconfig."""
import os
import test
import unittest
import shutil

from distutils import sysconfig
from distutils.tests import support
from test.test_support import TESTFN

class SysconfigTestCase(support.EnvironGuard,
                        unittest.TestCase):
    def setUp(self):
        super(SysconfigTestCase, self).setUp()
        self.makefile = None

    def tearDown(self):
        if self.makefile is not None:
            os.unlink(self.makefile)
        self.cleanup_testfn()
        super(SysconfigTestCase, self).tearDown()

    def cleanup_testfn(self):
        path = test.test_support.TESTFN
        if os.path.isfile(path):
            os.remove(path)
        elif os.path.isdir(path):
            shutil.rmtree(path)

    def test_get_python_lib(self):
        lib_dir = sysconfig.get_python_lib()
        # XXX doesn't work on Linux when Python was never installed before
        #self.assertTrue(os.path.isdir(lib_dir), lib_dir)
        # test for pythonxx.lib?
        self.assertNotEqual(sysconfig.get_python_lib(),
                            sysconfig.get_python_lib(prefix=TESTFN))
        _sysconfig = __import__('sysconfig')
        res = sysconfig.get_python_lib(True, True)
        self.assertEqual(_sysconfig.get_path('platstdlib'), res)

    def test_get_python_inc(self):
        inc_dir = sysconfig.get_python_inc()
        # This is not much of a test.  We make sure Python.h exists
        # in the directory returned by get_python_inc() but we don't know
        # it is the correct file.
        self.assertTrue(os.path.isdir(inc_dir), inc_dir)
        python_h = os.path.join(inc_dir, "Python.h")
        self.assertTrue(os.path.isfile(python_h), python_h)

    def test_parse_makefile_base(self):
        self.makefile = test.test_support.TESTFN
        fd = open(self.makefile, 'w')
        try:
            fd.write(r"CONFIG_ARGS= '--arg1=optarg1' 'ENV=LIB'" '\n')
            fd.write('VAR=$OTHER\nOTHER=foo')
        finally:
            fd.close()
        d = sysconfig.parse_makefile(self.makefile)
        self.assertEqual(d, {'CONFIG_ARGS': "'--arg1=optarg1' 'ENV=LIB'",
                             'OTHER': 'foo'})

    def test_parse_makefile_literal_dollar(self):
        self.makefile = test.test_support.TESTFN
        fd = open(self.makefile, 'w')
        try:
            fd.write(r"CONFIG_ARGS= '--arg1=optarg1' 'ENV=\$$LIB'" '\n')
            fd.write('VAR=$OTHER\nOTHER=foo')
        finally:
            fd.close()
        d = sysconfig.parse_makefile(self.makefile)
        self.assertEqual(d, {'CONFIG_ARGS': r"'--arg1=optarg1' 'ENV=\$LIB'",
                             'OTHER': 'foo'})


    def test_sysconfig_module(self):
        import sysconfig as global_sysconfig
        self.assertEqual(global_sysconfig.get_config_var('CFLAGS'), sysconfig.get_config_var('CFLAGS'))
        self.assertEqual(global_sysconfig.get_config_var('LDFLAGS'), sysconfig.get_config_var('LDFLAGS'))

    @unittest.skipIf(sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'),'compiler flags customized')
    def test_sysconfig_compiler_vars(self):
        # On OS X, binary installers support extension module building on
        # various levels of the operating system with differing Xcode
        # configurations.  This requires customization of some of the
        # compiler configuration directives to suit the environment on
        # the installed machine.  Some of these customizations may require
        # running external programs and, so, are deferred until needed by
        # the first extension module build.  With Python 3.3, only
        # the Distutils version of sysconfig is used for extension module
        # builds, which happens earlier in the Distutils tests.  This may
        # cause the following tests to fail since no tests have caused
        # the global version of sysconfig to call the customization yet.
        # The solution for now is to simply skip this test in this case.
        # The longer-term solution is to only have one version of sysconfig.

        import sysconfig as global_sysconfig
        if sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'):
            return
        self.assertEqual(global_sysconfig.get_config_var('LDSHARED'), sysconfig.get_config_var('LDSHARED'))
        self.assertEqual(global_sysconfig.get_config_var('CC'), sysconfig.get_config_var('CC'))




def test_suite():
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(SysconfigTestCase))
    return suite


if __name__ == '__main__':
    test.test_support.run_unittest(test_suite())
|
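
# Illustrative sketch (not part of the deleted file): the two parse_makefile
# tests above pin down the expansion rules -- $(VAR)/${VAR} references are
# expanded, "$$" escapes to a literal "$", and unresolvable references are
# dropped.  Paths here are made up.
import os, tempfile
from distutils import sysconfig as ds
path = os.path.join(tempfile.mkdtemp(), 'Makefile')
with open(path, 'w') as f:
    f.write('PREFIX=/usr\nBINDIR=$(PREFIX)/bin\nCOST=5$$\n')
print(ds.parse_makefile(path))
# -> {'PREFIX': '/usr', 'BINDIR': '/usr/bin', 'COST': '5$'}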
@@ -1,25 +0,0 @@
"""Tests for distutils.util."""
import sys
import unittest
from test.test_support import run_unittest

from distutils.errors import DistutilsPlatformError, DistutilsByteCompileError
from distutils.util import byte_compile


class UtilTestCase(unittest.TestCase):

    def test_dont_write_bytecode(self):
        # makes sure byte_compile raises a DistutilsByteCompileError
        # if sys.dont_write_bytecode is True
        old_dont_write_bytecode = sys.dont_write_bytecode
        sys.dont_write_bytecode = True
        try:
            self.assertRaises(DistutilsByteCompileError, byte_compile, [])
        finally:
            sys.dont_write_bytecode = old_dont_write_bytecode


def test_suite():
    return unittest.makeSuite(UtilTestCase)


if __name__ == "__main__":
    run_unittest(test_suite())
@@ -1,123 +0,0 @@
# Copyright (C) 2001-2006 Python Software Foundation
# Author: Barry Warsaw
# Contact: email-sig@python.org

"""A package for parsing, handling, and generating email messages."""

__version__ = '4.0.3'

__all__ = [
    # Old names
    'base64MIME',
    'Charset',
    'Encoders',
    'Errors',
    'Generator',
    'Header',
    'Iterators',
    'Message',
    'MIMEAudio',
    'MIMEBase',
    'MIMEImage',
    'MIMEMessage',
    'MIMEMultipart',
    'MIMENonMultipart',
    'MIMEText',
    'Parser',
    'quopriMIME',
    'Utils',
    'message_from_string',
    'message_from_file',
    # new names
    'base64mime',
    'charset',
    'encoders',
    'errors',
    'generator',
    'header',
    'iterators',
    'message',
    'mime',
    'parser',
    'quoprimime',
    'utils',
    ]



# Some convenience routines.  Don't import Parser and Message as side effects
# of importing email since those cascadingly import most of the rest of the
# email package.
def message_from_string(s, *args, **kws):
    """Parse a string into a Message object model.

    Optional _class and strict are passed to the Parser constructor.
    """
    from email.parser import Parser
    return Parser(*args, **kws).parsestr(s)


def message_from_file(fp, *args, **kws):
    """Read a file and parse its contents into a Message object model.

    Optional _class and strict are passed to the Parser constructor.
    """
    from email.parser import Parser
    return Parser(*args, **kws).parse(fp)
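
# A quick sketch of the convenience function above; the message text is
# made up for illustration (Python 2):
import email
msg = email.message_from_string(
    'From: a@example.com\nSubject: hi\n\nbody text\n')
print(msg['subject'])     # -> hi
print(msg.get_payload())  # -> body text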
# Lazy loading to provide name mapping from new-style names (PEP 8 compatible
# email 4.0 module names), to old-style names (email 3.0 module names).
import sys

class LazyImporter(object):
    def __init__(self, module_name):
        self.__name__ = 'email.' + module_name

    def __getattr__(self, name):
        __import__(self.__name__)
        mod = sys.modules[self.__name__]
        self.__dict__.update(mod.__dict__)
        return getattr(mod, name)


_LOWERNAMES = [
    # email.<old name> -> email.<new name is lowercased old name>
    'Charset',
    'Encoders',
    'Errors',
    'FeedParser',
    'Generator',
    'Header',
    'Iterators',
    'Message',
    'Parser',
    'Utils',
    'base64MIME',
    'quopriMIME',
    ]

_MIMENAMES = [
    # email.MIME<old name> -> email.mime.<new name is lowercased old name>
    'Audio',
    'Base',
    'Image',
    'Message',
    'Multipart',
    'NonMultipart',
    'Text',
    ]

for _name in _LOWERNAMES:
    importer = LazyImporter(_name.lower())
    sys.modules['email.' + _name] = importer
    setattr(sys.modules['email'], _name, importer)


import email.mime
for _name in _MIMENAMES:
    importer = LazyImporter('mime.' + _name.lower())
    sys.modules['email.MIME' + _name] = importer
    setattr(sys.modules['email'], 'MIME' + _name, importer)
    setattr(sys.modules['email.mime'], _name, importer)
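
# What the lazy mapping above buys, in one sketch: old-style names keep
# working and resolve to the new lowercase modules on first attribute access.
from email import Utils   # old name, backed by a LazyImporter
from email import utils   # new name
assert Utils.fix_eols is utils.fix_eols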
@@ -1,183 +0,0 @@
# Copyright (C) 2002-2006 Python Software Foundation
# Author: Ben Gertzfield
# Contact: email-sig@python.org

"""Base64 content transfer encoding per RFCs 2045-2047.

This module handles the content transfer encoding method defined in RFC 2045
to encode arbitrary 8-bit data using the encoding known as Base64, which maps
each group of three 8-bit bytes onto four 7-bit characters.

It is used in the MIME standards for email to attach images, audio, and text
using some 8-bit character sets to messages.

This module provides an interface to encode and decode both headers and bodies
with Base64 encoding.

RFC 2045 defines a method for including character set information in an
`encoded-word' in a header.  This method is commonly used for 8-bit real names
in To:, From:, Cc:, etc. fields, as well as Subject: lines.

This module does not do the line wrapping or end-of-line character conversion
necessary for proper internationalized headers; it only does dumb encoding and
decoding.  To deal with the various line wrapping issues, use the email.header
module.
"""

__all__ = [
    'base64_len',
    'body_decode',
    'body_encode',
    'decode',
    'decodestring',
    'encode',
    'encodestring',
    'header_encode',
    ]


from binascii import b2a_base64, a2b_base64
from email.utils import fix_eols

CRLF = '\r\n'
NL = '\n'
EMPTYSTRING = ''

# See also Charset.py
MISC_LEN = 7



# Helpers
def base64_len(s):
    """Return the length of s when it is encoded with base64."""
    groups_of_3, leftover = divmod(len(s), 3)
    # 4 bytes out for each 3 bytes (or nonzero fraction thereof) in.
    # Thanks, Tim!
    n = groups_of_3 * 4
    if leftover:
        n += 4
    return n
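
# A quick check of base64_len's arithmetic -- four output characters per
# (possibly partial) group of three input bytes (Python 2):
from binascii import b2a_base64
from email.base64mime import base64_len
for s in ('', 'a', 'abc', 'abcd'):
    assert base64_len(s) == len(b2a_base64(s)) - 1   # -1: b2a adds '\n'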
def header_encode(header, charset='iso-8859-1', keep_eols=False,
                  maxlinelen=76, eol=NL):
    """Encode a single header line with Base64 encoding in a given charset.

    Defined in RFC 2045, this Base64 encoding is identical to normal Base64
    encoding, except that each line must be intelligently wrapped (respecting
    the Base64 encoding), and subsequent lines must start with a space.

    charset names the character set to use to encode the header.  It defaults
    to iso-8859-1.

    End-of-line characters (\\r, \\n, \\r\\n) will be automatically converted
    to the canonical email line separator \\r\\n unless the keep_eols
    parameter is True (the default is False).

    Each line of the header will be terminated in the value of eol, which
    defaults to "\\n".  Set this to "\\r\\n" if you are using the result of
    this function directly in email.

    The resulting string will be in the form:

    "=?charset?b?WW/5ciBtYXp66XLrIHf8eiBhIGhhbXBzdGHuciBBIFlv+XIgbWF6euly?=\\n
      =?charset?b?6yB3/HogYSBoYW1wc3Rh7nIgQkMgWW/5ciBtYXp66XLrIHf8eiBhIGhh?="

    with each line wrapped at, at most, maxlinelen characters (defaults to 76
    characters).
    """
    # Return empty headers unchanged
    if not header:
        return header

    if not keep_eols:
        header = fix_eols(header)

    # Base64 encode each line, in encoded chunks no greater than maxlinelen in
    # length, after the RFC chrome is added in.
    base64ed = []
    max_encoded = maxlinelen - len(charset) - MISC_LEN
    max_unencoded = max_encoded * 3 // 4

    for i in range(0, len(header), max_unencoded):
        base64ed.append(b2a_base64(header[i:i+max_unencoded]))

    # Now add the RFC chrome to each encoded chunk
    lines = []
    for line in base64ed:
        # Ignore the last character of each line if it is a newline
        if line.endswith(NL):
            line = line[:-1]
        # Add the chrome
        lines.append('=?%s?b?%s?=' % (charset, line))
    # Glue the lines together and return it.  BAW: should we be able to
    # specify the leading whitespace in the joiner?
    joiner = eol + ' '
    return joiner.join(lines)
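
# Usage sketch for header_encode() above; the Latin-1 byte string is made
# up for illustration (Python 2):
from email.base64mime import header_encode
print(header_encode('Caf\xe9 au lait'))
# -> =?iso-8859-1?b?Q2Fm6SBhdSBsYWl0?=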
def encode(s, binary=True, maxlinelen=76, eol=NL):
    """Encode a string with base64.

    Each line will be wrapped at, at most, maxlinelen characters (defaults to
    76 characters).

    If binary is False, end-of-line characters will be converted to the
    canonical email end-of-line sequence \\r\\n.  Otherwise they will be left
    verbatim (this is the default).

    Each line of encoded text will end with eol, which defaults to "\\n".  Set
    this to "\\r\\n" if you will be using the result of this function directly
    in an email.
    """
    if not s:
        return s

    if not binary:
        s = fix_eols(s)

    encvec = []
    max_unencoded = maxlinelen * 3 // 4
    for i in range(0, len(s), max_unencoded):
        # BAW: should encode() inherit b2a_base64()'s dubious behavior in
        # adding a newline to the encoded string?
        enc = b2a_base64(s[i:i + max_unencoded])
        if enc.endswith(NL) and eol != NL:
            enc = enc[:-1] + eol
        encvec.append(enc)
    return EMPTYSTRING.join(encvec)


# For convenience and backwards compatibility w/ standard base64 module
body_encode = encode
encodestring = encode



def decode(s, convert_eols=None):
    """Decode a raw base64 string.

    If convert_eols is set to a string value, all canonical email linefeeds,
    e.g. "\\r\\n", in the decoded text will be converted to the value of
    convert_eols.  os.linesep is a good choice for convert_eols if you are
    decoding a text attachment.

    This function does not parse a full MIME header value encoded with
    base64 (like =?iso-8859-1?b?bmloISBuaWgh?=) -- please use the high
    level email.header class for that functionality.
    """
    if not s:
        return s

    dec = a2b_base64(s)
    if convert_eols:
        return dec.replace(CRLF, convert_eols)
    return dec


# For convenience and backwards compatibility w/ standard base64 module
body_decode = decode
decodestring = decode
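
# encode()/decode() above are inverses for short blobs (Python 2):
from email.base64mime import encode, decode
blob = 'line one\r\nline two\r\n'
assert decode(encode(blob)) == blob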
@@ -1,57 +0,0 @@
# Copyright (C) 2001-2006 Python Software Foundation
# Author: Barry Warsaw
# Contact: email-sig@python.org

"""email package exception classes."""



class MessageError(Exception):
    """Base class for errors in the email package."""


class MessageParseError(MessageError):
    """Base class for message parsing errors."""


class HeaderParseError(MessageParseError):
    """Error while parsing headers."""


class BoundaryError(MessageParseError):
    """Couldn't find terminating boundary."""


class MultipartConversionError(MessageError, TypeError):
    """Conversion to a multipart is prohibited."""


class CharsetError(MessageError):
    """An illegal charset was given."""



# These are parsing defects which the parser was able to work around.
class MessageDefect:
    """Base class for a message defect."""

    def __init__(self, line=None):
        self.line = line

class NoBoundaryInMultipartDefect(MessageDefect):
    """A message claimed to be a multipart but had no boundary parameter."""

class StartBoundaryNotFoundDefect(MessageDefect):
    """The claimed start boundary was never found."""

class FirstHeaderLineIsContinuationDefect(MessageDefect):
    """A message had a continuation line as its first header line."""

class MisplacedEnvelopeHeaderDefect(MessageDefect):
    """A 'Unix-from' header was found in the middle of a header block."""

class MalformedHeaderDefect(MessageDefect):
    """Found a header that was missing a colon, or was otherwise malformed."""

class MultipartInvariantViolationDefect(MessageDefect):
    """A message claimed to be a multipart but no subparts were found."""
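
# Sketch of how the defect classes are used: parsers record them on the
# message instead of raising.  The defect names shown reflect the Python 2.x
# parser's behavior for a multipart with no boundary parameter.
import email
msg = email.message_from_string('Content-Type: multipart/mixed\n\nbody\n')
print([d.__class__.__name__ for d in msg.defects])
# e.g. ['NoBoundaryInMultipartDefect', 'MultipartInvariantViolationDefect']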
@@ -1,514 +0,0 @@
# Copyright (C) 2002-2006 Python Software Foundation
# Author: Ben Gertzfield, Barry Warsaw
# Contact: email-sig@python.org

"""Header encoding and decoding functionality."""

__all__ = [
    'Header',
    'decode_header',
    'make_header',
    ]

import re
import binascii

import email.quoprimime
import email.base64mime

from email.errors import HeaderParseError
from email.charset import Charset

NL = '\n'
SPACE = ' '
USPACE = u' '
SPACE8 = ' ' * 8
UEMPTYSTRING = u''

MAXLINELEN = 76

USASCII = Charset('us-ascii')
UTF8 = Charset('utf-8')

# Match encoded-word strings in the form =?charset?q?Hello_World?=
ecre = re.compile(r'''
  =\?                   # literal =?
  (?P<charset>[^?]*?)   # non-greedy up to the next ? is the charset
  \?                    # literal ?
  (?P<encoding>[qb])    # either a "q" or a "b", case insensitive
  \?                    # literal ?
  (?P<encoded>.*?)      # non-greedy up to the next ?= is the encoded string
  \?=                   # literal ?=
  (?=[ \t]|$)           # whitespace or the end of the string
  ''', re.VERBOSE | re.IGNORECASE | re.MULTILINE)

# Field name regexp, including trailing colon, but not separating whitespace,
# according to RFC 2822.  Character range is from tilde to exclamation mark.
# For use with .match()
fcre = re.compile(r'[\041-\176]+:$')

# Find a header embedded in a putative header value.  Used to check for
# header injection attack.
_embeded_header = re.compile(r'\n[^ \t]+:')



# Helpers
_max_append = email.quoprimime._max_append



def decode_header(header):
    """Decode a message header value without converting charset.

    Returns a list of (decoded_string, charset) pairs containing each of the
    decoded parts of the header.  Charset is None for non-encoded parts of the
    header, otherwise a lower-case string containing the name of the character
    set specified in the encoded string.

    An email.errors.HeaderParseError may be raised when certain decoding
    errors occur (e.g. a base64 decoding exception).
    """
    # If no encoding, just return the header
    header = str(header)
    if not ecre.search(header):
        return [(header, None)]
    decoded = []
    dec = ''
    for line in header.splitlines():
        # This line might not have an encoding in it
        if not ecre.search(line):
            decoded.append((line, None))
            continue
        parts = ecre.split(line)
        while parts:
            unenc = parts.pop(0).strip()
            if unenc:
                # Should we continue a long line?
                if decoded and decoded[-1][1] is None:
                    decoded[-1] = (decoded[-1][0] + SPACE + unenc, None)
                else:
                    decoded.append((unenc, None))
            if parts:
                charset, encoding = [s.lower() for s in parts[0:2]]
                encoded = parts[2]
                dec = None
                if encoding == 'q':
                    dec = email.quoprimime.header_decode(encoded)
                elif encoding == 'b':
                    paderr = len(encoded) % 4  # Postel's law: add missing padding
                    if paderr:
                        encoded += '==='[:4 - paderr]
                    try:
                        dec = email.base64mime.decode(encoded)
                    except binascii.Error:
                        # Turn this into a higher level exception.  BAW: Right
                        # now we throw the lower level exception away but
                        # when/if we get exception chaining, we'll preserve it.
                        raise HeaderParseError
                if dec is None:
                    dec = encoded

                if decoded and decoded[-1][1] == charset:
                    decoded[-1] = (decoded[-1][0] + dec, decoded[-1][1])
                else:
                    decoded.append((dec, charset))
            del parts[0:3]
    return decoded
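
# decode_header() in action on a mixed header (Python 2):
from email.header import decode_header
print(decode_header('=?iso-8859-1?q?p=F6stal?= (postal)'))
# -> [('p\xf6stal', 'iso-8859-1'), ('(postal)', None)]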
def make_header(decoded_seq, maxlinelen=None, header_name=None,
                continuation_ws=' '):
    """Create a Header from a sequence of pairs as returned by decode_header()

    decode_header() takes a header value string and returns a sequence of
    pairs of the format (decoded_string, charset) where charset is the string
    name of the character set.

    This function takes one of those sequences of pairs and returns a Header
    instance.  Optional maxlinelen, header_name, and continuation_ws are as in
    the Header constructor.
    """
    h = Header(maxlinelen=maxlinelen, header_name=header_name,
               continuation_ws=continuation_ws)
    for s, charset in decoded_seq:
        # None means us-ascii but we can simply pass it on to h.append()
        if charset is not None and not isinstance(charset, Charset):
            charset = Charset(charset)
        h.append(s, charset)
    return h
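
# Round-trip sketch: decode an encoded word, rebuild a Header, re-encode.
from email.header import decode_header, make_header
h = make_header(decode_header('=?iso-8859-1?q?p=F6stal?='))
print(str(h))   # -> =?iso-8859-1?q?p=F6stal?=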
class Header:
    def __init__(self, s=None, charset=None,
                 maxlinelen=None, header_name=None,
                 continuation_ws=' ', errors='strict'):
        """Create a MIME-compliant header that can contain many character sets.

        Optional s is the initial header value.  If None, the initial header
        value is not set.  You can later append to the header with .append()
        method calls.  s may be a byte string or a Unicode string, but see the
        .append() documentation for semantics.

        Optional charset serves two purposes: it has the same meaning as the
        charset argument to the .append() method.  It also sets the default
        character set for all subsequent .append() calls that omit the charset
        argument.  If charset is not provided in the constructor, the us-ascii
        charset is used both as s's initial charset and as the default for
        subsequent .append() calls.

        The maximum line length can be specified explicitly via maxlinelen.
        For splitting the first line to a shorter value (to account for the
        field header which isn't included in s, e.g. `Subject') pass in the
        name of the field in header_name.  The default maxlinelen is 76.

        continuation_ws must be RFC 2822 compliant folding whitespace (usually
        either a space or a hard tab) which will be prepended to continuation
        lines.

        errors is passed through to the .append() call.
        """
        if charset is None:
            charset = USASCII
        if not isinstance(charset, Charset):
            charset = Charset(charset)
        self._charset = charset
        self._continuation_ws = continuation_ws
        cws_expanded_len = len(continuation_ws.replace('\t', SPACE8))
        # BAW: I believe `chunks' and `maxlinelen' should be non-public.
        self._chunks = []
        if s is not None:
            self.append(s, charset, errors)
        if maxlinelen is None:
            maxlinelen = MAXLINELEN
        if header_name is None:
            # We don't know anything about the field header so the first line
            # is the same length as subsequent lines.
            self._firstlinelen = maxlinelen
        else:
            # The first line should be shorter to take into account the field
            # header.  Also subtract off 2 extra for the colon and space.
            self._firstlinelen = maxlinelen - len(header_name) - 2
        # Second and subsequent lines should subtract off the length in
        # columns of the continuation whitespace prefix.
        self._maxlinelen = maxlinelen - cws_expanded_len

    def __str__(self):
        """A synonym for self.encode()."""
        return self.encode()

    def __unicode__(self):
        """Helper for the built-in unicode function."""
        uchunks = []
        lastcs = None
        for s, charset in self._chunks:
            # We must preserve spaces between encoded and non-encoded word
            # boundaries, which means for us we need to add a space when we go
            # from a charset to None/us-ascii, or from None/us-ascii to a
            # charset.  Only do this for the second and subsequent chunks.
            nextcs = charset
            if uchunks:
                if lastcs not in (None, 'us-ascii'):
                    if nextcs in (None, 'us-ascii'):
                        uchunks.append(USPACE)
                        nextcs = None
                elif nextcs not in (None, 'us-ascii'):
                    uchunks.append(USPACE)
            lastcs = nextcs
            uchunks.append(unicode(s, str(charset)))
        return UEMPTYSTRING.join(uchunks)

    # Rich comparison operators for equality only.  BAW: does it make sense to
    # have or explicitly disable <, <=, >, >= operators?
    def __eq__(self, other):
        # other may be a Header or a string.  Both are fine so coerce
        # ourselves to a string, swap the args and do another comparison.
        return other == self.encode()

    def __ne__(self, other):
        return not self == other

    def append(self, s, charset=None, errors='strict'):
        """Append a string to the MIME header.

        Optional charset, if given, should be a Charset instance or the name
        of a character set (which will be converted to a Charset instance).  A
        value of None (the default) means that the charset given in the
        constructor is used.

        s may be a byte string or a Unicode string.  If it is a byte string
        (i.e. isinstance(s, str) is true), then charset is the encoding of
        that byte string, and a UnicodeError will be raised if the string
        cannot be decoded with that charset.  If s is a Unicode string, then
        charset is a hint specifying the character set of the characters in
        the string.  In this case, when producing an RFC 2822 compliant header
        using RFC 2047 rules, the Unicode string will be encoded using the
        following charsets in order: us-ascii, the charset hint, utf-8.  The
        first character set not to provoke a UnicodeError is used.

        Optional `errors' is passed as the third argument to any unicode() or
        ustr.encode() call.
        """
        if charset is None:
            charset = self._charset
        elif not isinstance(charset, Charset):
            charset = Charset(charset)
        # If the charset is our faux 8bit charset, leave the string unchanged
        if charset != '8bit':
            # We need to test that the string can be converted to unicode and
            # back to a byte string, given the input and output codecs of the
            # charset.
            if isinstance(s, str):
                # Possibly raise UnicodeError if the byte string can't be
                # converted to a unicode with the input codec of the charset.
                incodec = charset.input_codec or 'us-ascii'
                ustr = unicode(s, incodec, errors)
                # Now make sure that the unicode could be converted back to a
                # byte string with the output codec, which may be different
                # than the input codec.  Still, use the original byte string.
                outcodec = charset.output_codec or 'us-ascii'
                ustr.encode(outcodec, errors)
            elif isinstance(s, unicode):
                # Now we have to be sure the unicode string can be converted
                # to a byte string with a reasonable output codec.  We want to
                # use the byte string in the chunk.
                for charset in USASCII, charset, UTF8:
                    try:
                        outcodec = charset.output_codec or 'us-ascii'
                        s = s.encode(outcodec, errors)
                        break
                    except UnicodeError:
                        pass
                else:
                    assert False, 'utf-8 conversion failed'
        self._chunks.append((s, charset))

    def _split(self, s, charset, maxlinelen, splitchars):
        # Split up a header safely for use with encode_chunks.
        splittable = charset.to_splittable(s)
        encoded = charset.from_splittable(splittable, True)
        elen = charset.encoded_header_len(encoded)
        # If the line's encoded length fits, just return it
        if elen <= maxlinelen:
            return [(encoded, charset)]
        # If we have undetermined raw 8bit characters sitting in a byte
        # string, we really don't know what the right thing to do is.  We
        # can't really split it because it might be multibyte data which we
        # could break if we split it between pairs.  The least harm seems to
        # be to not split the header at all, but that means they could go out
        # longer than maxlinelen.
        if charset == '8bit':
            return [(s, charset)]
        # BAW: I'm not sure what the right test here is.  What we're trying to
        # do is be faithful to RFC 2822's recommendation that ($2.2.3):
        #
        # "Note: Though structured field bodies are defined in such a way that
        #  folding can take place between many of the lexical tokens (and even
        #  within some of the lexical tokens), folding SHOULD be limited to
        #  placing the CRLF at higher-level syntactic breaks."
        #
        # For now, I can only imagine doing this when the charset is us-ascii,
        # although it's possible that other charsets may also benefit from the
        # higher-level syntactic breaks.
        elif charset == 'us-ascii':
            return self._split_ascii(s, charset, maxlinelen, splitchars)
        # BAW: should we use encoded?
        elif elen == len(s):
            # We can split on _maxlinelen boundaries because we know that the
            # encoding won't change the size of the string
            splitpnt = maxlinelen
            first = charset.from_splittable(splittable[:splitpnt], False)
            last = charset.from_splittable(splittable[splitpnt:], False)
        else:
            # Binary search for split point
            first, last = _binsplit(splittable, charset, maxlinelen)
        # first is of the proper length so just wrap it in the appropriate
        # chrome.  last must be recursively split.
        fsplittable = charset.to_splittable(first)
        fencoded = charset.from_splittable(fsplittable, True)
        chunk = [(fencoded, charset)]
        return chunk + self._split(last, charset, self._maxlinelen, splitchars)

    def _split_ascii(self, s, charset, firstlen, splitchars):
        chunks = _split_ascii(s, firstlen, self._maxlinelen,
                              self._continuation_ws, splitchars)
        return zip(chunks, [charset]*len(chunks))

    def _encode_chunks(self, newchunks, maxlinelen):
        # MIME-encode a header with many different charsets and/or encodings.
        #
        # Given a list of pairs (string, charset), return a MIME-encoded
        # string suitable for use in a header field.  Each pair may have
        # different charsets and/or encodings, and the resulting header will
        # accurately reflect each setting.
        #
        # Each encoding can be email.utils.QP (quoted-printable, for
        # ASCII-like character sets like iso-8859-1), email.utils.BASE64
        # (Base64, for non-ASCII like character sets like KOI8-R and
        # iso-2022-jp), or None (no encoding).
        #
        # Each pair will be represented on a separate line; the resulting
        # string will be in the format:
        #
        # =?charset1?q?Mar=EDa_Gonz=E1lez_Alonso?=\n
        #  =?charset2?b?SvxyZ2VuIEL2aW5n?="
        chunks = []
        for header, charset in newchunks:
            if not header:
                continue
            if charset is None or charset.header_encoding is None:
                s = header
            else:
                s = charset.header_encode(header)
            # Don't add more folding whitespace than necessary
            if chunks and chunks[-1].endswith(' '):
                extra = ''
            else:
                extra = ' '
            _max_append(chunks, s, maxlinelen, extra)
        joiner = NL + self._continuation_ws
        return joiner.join(chunks)

    def encode(self, splitchars=';, '):
        """Encode a message header into an RFC-compliant format.

        There are many issues involved in converting a given string for use in
        an email header.  Only certain character sets are readable in most
        email clients, and as header strings can only contain a subset of
        7-bit ASCII, care must be taken to properly convert and encode (with
        Base64 or quoted-printable) header strings.  In addition, there is a
        75-character length limit on any given encoded header field, so
        line-wrapping must be performed, even with double-byte character sets.

        This method will do its best to convert the string to the correct
        character set used in email, and encode and line wrap it safely with
        the appropriate scheme for that character set.

        If the given charset is not known or an error occurs during
        conversion, this function will return the header untouched.

        Optional splitchars is a string containing characters to split long
        ASCII lines on, in rough support of RFC 2822's `highest level
        syntactic breaks'.  This doesn't affect RFC 2047 encoded lines.
        """
        newchunks = []
        maxlinelen = self._firstlinelen
        lastlen = 0
        for s, charset in self._chunks:
            # The first bit of the next chunk should be just long enough to
            # fill the next line.  Don't forget the space separating the
            # encoded words.
            targetlen = maxlinelen - lastlen - 1
            if targetlen < charset.encoded_header_len(''):
                # Stick it on the next line
                targetlen = maxlinelen
            newchunks += self._split(s, charset, targetlen, splitchars)
            lastchunk, lastcharset = newchunks[-1]
            lastlen = lastcharset.encoded_header_len(lastchunk)
        value = self._encode_chunks(newchunks, maxlinelen)
        if _embeded_header.search(value):
            raise HeaderParseError("header value appears to contain "
                                   "an embedded header: {!r}".format(value))
        return value



def _split_ascii(s, firstlen, restlen, continuation_ws, splitchars):
    lines = []
    maxlen = firstlen
    for line in s.splitlines():
        # Ignore any leading whitespace (i.e. continuation whitespace) already
        # on the line, since we'll be adding our own.
        line = line.lstrip()
        if len(line) < maxlen:
            lines.append(line)
            maxlen = restlen
            continue
        # Attempt to split the line at the highest-level syntactic break
        # possible.  Note that we don't have a lot of smarts about field
        # syntax; we just try to break on semi-colons, then commas, then
        # whitespace.
        for ch in splitchars:
            if ch in line:
                break
        else:
            # There's nothing useful to split the line on, not even spaces, so
            # just append this line unchanged
            lines.append(line)
            maxlen = restlen
            continue
        # Now split the line on the character plus trailing whitespace
        cre = re.compile(r'%s\s*' % ch)
        if ch in ';,':
            eol = ch
        else:
            eol = ''
        joiner = eol + ' '
        joinlen = len(joiner)
        wslen = len(continuation_ws.replace('\t', SPACE8))
        this = []
        linelen = 0
        for part in cre.split(line):
            curlen = linelen + max(0, len(this)-1) * joinlen
            partlen = len(part)
            onfirstline = not lines
            # We don't want to split after the field name, if we're on the
            # first line and the field name is present in the header string.
            if ch == ' ' and onfirstline and \
                   len(this) == 1 and fcre.match(this[0]):
                this.append(part)
                linelen += partlen
            elif curlen + partlen > maxlen:
                if this:
                    lines.append(joiner.join(this) + eol)
                # If this part is longer than maxlen and we aren't already
                # splitting on whitespace, try to recursively split this line
                # on whitespace.
                if partlen > maxlen and ch != ' ':
                    subl = _split_ascii(part, maxlen, restlen,
                                        continuation_ws, ' ')
                    lines.extend(subl[:-1])
                    this = [subl[-1]]
                else:
                    this = [part]
                linelen = wslen + len(this[-1])
                maxlen = restlen
            else:
                this.append(part)
                linelen += partlen
        # Put any left over parts on a line by themselves
        if this:
            lines.append(joiner.join(this))
    return lines



def _binsplit(splittable, charset, maxlinelen):
    i = 0
    j = len(splittable)
    while i < j:
        # Invariants:
        # 1. splittable[:k] fits for all k <= i (note that we *assume*,
        #    at the start, that splittable[:0] fits).
        # 2. splittable[:k] does not fit for any k > j (at the start,
        #    this means we shouldn't look at any k > len(splittable)).
        # 3. We don't know about splittable[:k] for k in i+1..j.
        # 4. We want to set i to the largest k that fits, with i <= k <= j.
        #
        m = (i+j+1) >> 1  # ceiling((i+j)/2); i < m <= j
        chunk = charset.from_splittable(splittable[:m], True)
        chunklen = charset.encoded_header_len(chunk)
        if chunklen <= maxlinelen:
            # m is acceptable, so is a new lower bound.
            i = m
        else:
            # m is not acceptable, so final i must be < m.
            j = m - 1
    # i == j.  Invariant #1 implies that splittable[:i] fits, and
    # invariant #2 implies that splittable[:i+1] does not fit, so i
    # is what we're looking for.
    first = charset.from_splittable(splittable[:i], False)
    last = charset.from_splittable(splittable[i:], False)
    return first, last
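
# Usage sketch for the Header class above: mixing charsets in one value;
# the German text is made up for illustration (Python 2).
from email.header import Header
h = Header('Hello', charset='us-ascii', header_name='Subject')
h.append('Gr\xfc\xdf Gott', charset='iso-8859-1')
print(h.encode())
# -> something like: Hello =?iso-8859-1?q?Gr=FC=DF_Gott?=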
@@ -1,91 +0,0 @@
# Copyright (C) 2001-2006 Python Software Foundation
# Author: Barry Warsaw, Thomas Wouters, Anthony Baxter
# Contact: email-sig@python.org

"""A parser of RFC 2822 and MIME email messages."""

__all__ = ['Parser', 'HeaderParser']

import warnings
from cStringIO import StringIO

from email.feedparser import FeedParser
from email.message import Message



class Parser:
    def __init__(self, *args, **kws):
        """Parser of RFC 2822 and MIME email messages.

        Creates an in-memory object tree representing the email message, which
        can then be manipulated and turned over to a Generator to return the
        textual representation of the message.

        The string must be formatted as a block of RFC 2822 headers and header
        continuation lines, optionally preceded by a `Unix-from' header.  The
        header block is terminated either by the end of the string or by a
        blank line.

        _class is the class to instantiate for new message objects when they
        must be created.  This class must have a constructor that can take
        zero arguments.  Default is Message.Message.
        """
        if len(args) >= 1:
            if '_class' in kws:
                raise TypeError("Multiple values for keyword arg '_class'")
            kws['_class'] = args[0]
        if len(args) == 2:
            if 'strict' in kws:
                raise TypeError("Multiple values for keyword arg 'strict'")
            kws['strict'] = args[1]
        if len(args) > 2:
            raise TypeError('Too many arguments')
        if '_class' in kws:
            self._class = kws['_class']
            del kws['_class']
        else:
            self._class = Message
        if 'strict' in kws:
            warnings.warn("'strict' argument is deprecated (and ignored)",
                          DeprecationWarning, 2)
            del kws['strict']
        if kws:
            raise TypeError('Unexpected keyword arguments')

    def parse(self, fp, headersonly=False):
        """Create a message structure from the data in a file.

        Reads all the data from the file and returns the root of the message
        structure.  Optional headersonly is a flag specifying whether to stop
        parsing after reading the headers or not.  The default is False,
        meaning it parses the entire contents of the file.
        """
        feedparser = FeedParser(self._class)
        if headersonly:
            feedparser._set_headersonly()
        while True:
            data = fp.read(8192)
            if not data:
                break
            feedparser.feed(data)
        return feedparser.close()

    def parsestr(self, text, headersonly=False):
        """Create a message structure from a string.

        Returns the root of the message structure.  Optional headersonly is a
        flag specifying whether to stop parsing after reading the headers or
        not.  The default is False, meaning it parses the entire contents of
        the file.
        """
        return self.parse(StringIO(text), headersonly=headersonly)



class HeaderParser(Parser):
    def parse(self, fp, headersonly=True):
        return Parser.parse(self, fp, True)

    def parsestr(self, text, headersonly=True):
        return Parser.parsestr(self, text, True)
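
# Usage sketch (Python 2): HeaderParser stops at the blank line, leaving
# the body as one raw, unparsed string.  The message text is made up.
from email.parser import HeaderParser
msg = HeaderParser().parsestr('From: a@example.com\nSubject: hi\n\nbody\n')
print(msg['subject'])     # -> hi
print(msg.get_payload())  # -> the raw body, never MIME-parsed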
@@ -1,336 +0,0 @@
# Copyright (C) 2001-2006 Python Software Foundation
# Author: Ben Gertzfield
# Contact: email-sig@python.org

"""Quoted-printable content transfer encoding per RFCs 2045-2047.

This module handles the content transfer encoding method defined in RFC 2045
to encode US ASCII-like 8-bit data called `quoted-printable'.  It is used to
safely encode text that is in a character set similar to the 7-bit US ASCII
character set, but that includes some 8-bit characters that are normally not
allowed in email bodies or headers.

Quoted-printable is very space-inefficient for encoding binary files; use the
email.base64mime module for that instead.

This module provides an interface to encode and decode both headers and bodies
with quoted-printable encoding.

RFC 2045 defines a method for including character set information in an
`encoded-word' in a header.  This method is commonly used for 8-bit real names
in To:/From:/Cc: etc. fields, as well as Subject: lines.

This module does not do the line wrapping or end-of-line character
conversion necessary for proper internationalized headers; it only
does dumb encoding and decoding.  To deal with the various line
wrapping issues, use the email.header module.
"""

__all__ = [
    'body_decode',
    'body_encode',
    'body_quopri_check',
    'body_quopri_len',
    'decode',
    'decodestring',
    'encode',
    'encodestring',
    'header_decode',
    'header_encode',
    'header_quopri_check',
    'header_quopri_len',
    'quote',
    'unquote',
    ]

import re

from string import hexdigits
from email.utils import fix_eols

CRLF = '\r\n'
NL = '\n'

# See also Charset.py
MISC_LEN = 7

hqre = re.compile(r'[^-a-zA-Z0-9!*+/ ]')
bqre = re.compile(r'[^ !-<>-~\t]')



# Helpers
def header_quopri_check(c):
    """Return True if the character should be escaped with header quopri."""
    return bool(hqre.match(c))


def body_quopri_check(c):
    """Return True if the character should be escaped with body quopri."""
    return bool(bqre.match(c))


def header_quopri_len(s):
    """Return the length of s when it is encoded with header quopri."""
    count = 0
    for c in s:
        if hqre.match(c):
            count += 3
        else:
            count += 1
    return count


def body_quopri_len(str):
    """Return the length of str when it is encoded with body quopri."""
    count = 0
    for c in str:
        if bqre.match(c):
            count += 3
        else:
            count += 1
    return count


def _max_append(L, s, maxlen, extra=''):
    if not L:
        L.append(s.lstrip())
    elif len(L[-1]) + len(s) <= maxlen:
        L[-1] += extra + s
    else:
        L.append(s.lstrip())


def unquote(s):
    """Turn a string in the form =AB to the ASCII character with value 0xab"""
    return chr(int(s[1:3], 16))


def quote(c):
    return "=%02X" % ord(c)
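
# quote() and unquote() above are exact inverses on single characters:
from email.quoprimime import quote, unquote
assert quote('\xe9') == '=E9'
assert unquote('=E9') == '\xe9'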
def header_encode(header, charset="iso-8859-1", keep_eols=False,
                  maxlinelen=76, eol=NL):
    """Encode a single header line with quoted-printable (like) encoding.

    Defined in RFC 2045, this `Q' encoding is similar to quoted-printable, but
    used specifically for email header fields to allow charsets with mostly 7
    bit characters (and some 8 bit) to remain more or less readable in non-RFC
    2045 aware mail clients.

    charset names the character set to use to encode the header.  It defaults
    to iso-8859-1.

    The resulting string will be in the form:

    "=?charset?q?I_f=E2rt_in_your_g=E8n=E8ral_dire=E7tion?\\n
      =?charset?q?Silly_=C8nglish_Kn=EEghts?="

    with each line wrapped safely at, at most, maxlinelen characters (defaults
    to 76 characters).  If maxlinelen is None, the entire string is encoded in
    one chunk with no splitting.

    End-of-line characters (\\r, \\n, \\r\\n) will be automatically converted
    to the canonical email line separator \\r\\n unless the keep_eols
    parameter is True (the default is False).

    Each line of the header will be terminated in the value of eol, which
    defaults to "\\n".  Set this to "\\r\\n" if you are using the result of
    this function directly in email.
    """
    # Return empty headers unchanged
    if not header:
        return header

    if not keep_eols:
        header = fix_eols(header)

    # Quopri encode each line, in encoded chunks no greater than maxlinelen in
    # length, after the RFC chrome is added in.
    quoted = []
    if maxlinelen is None:
        # An obnoxiously large number that's good enough
        max_encoded = 100000
    else:
        max_encoded = maxlinelen - len(charset) - MISC_LEN - 1

    for c in header:
        # Space may be represented as _ instead of =20 for readability
        if c == ' ':
            _max_append(quoted, '_', max_encoded)
        # These characters can be included verbatim
        elif not hqre.match(c):
            _max_append(quoted, c, max_encoded)
        # Otherwise, replace with hex value like =E2
        else:
            _max_append(quoted, "=%02X" % ord(c), max_encoded)

    # Now add the RFC chrome to each encoded chunk and glue the chunks
    # together.  BAW: should we be able to specify the leading whitespace in
    # the joiner?
    joiner = eol + ' '
    return joiner.join(['=?%s?q?%s?=' % (charset, line) for line in quoted])
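
# Usage sketch for the Q encoding above; Latin-1 bytes made up (Python 2):
from email.quoprimime import header_encode
print(header_encode('Caf\xe9 au lait'))
# -> =?iso-8859-1?q?Caf=E9_au_lait?=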
def encode(body, binary=False, maxlinelen=76, eol=NL):
    """Encode with quoted-printable, wrapping at maxlinelen characters.

    If binary is False (the default), end-of-line characters will be converted
    to the canonical email end-of-line sequence \\r\\n.  Otherwise they will
    be left verbatim.

    Each line of encoded text will end with eol, which defaults to "\\n".  Set
    this to "\\r\\n" if you will be using the result of this function directly
    in an email.

    Each line will be wrapped at, at most, maxlinelen characters (defaults to
    76 characters).  Long lines will have the `soft linefeed' quoted-printable
    character "=" appended to them, so the decoded text will be identical to
    the original text.
    """
    if not body:
        return body

    if not binary:
        body = fix_eols(body)

    # BAW: We're accumulating the body text by string concatenation.  That
    # can't be very efficient, but I don't have time now to rewrite it.  It
    # just feels like this algorithm could be more efficient.
    encoded_body = ''
    lineno = -1
    # Preserve line endings here so we can check later to see if an eol needs
    # to be added to the output.
    lines = body.splitlines(1)
    for line in lines:
        # But strip off line-endings for processing this line.
        if line.endswith(CRLF):
            line = line[:-2]
        elif line[-1] in CRLF:
            line = line[:-1]

        lineno += 1
        encoded_line = ''
        prev = None
        linelen = len(line)
        # Now we need to examine every character to see if it needs to be
        # quopri encoded.  BAW: again, string concatenation is inefficient.
        for j in range(linelen):
            c = line[j]
            prev = c
            if bqre.match(c):
                c = quote(c)
            elif j+1 == linelen:
                # Check for whitespace at end of line; special case
                if c not in ' \t':
                    encoded_line += c
                prev = c
                continue
            # Check to see if the line has reached its maximum length
            if len(encoded_line) + len(c) >= maxlinelen:
                encoded_body += encoded_line + '=' + eol
                encoded_line = ''
            encoded_line += c
        # Now at end of line..
        if prev and prev in ' \t':
            # Special case for whitespace at end of file
            if lineno + 1 == len(lines):
                prev = quote(prev)
                if len(encoded_line) + len(prev) > maxlinelen:
                    encoded_body += encoded_line + '=' + eol + prev
                else:
                    encoded_body += encoded_line + prev
            # Just normal whitespace at end of line
            else:
                encoded_body += encoded_line + prev + '=' + eol
            encoded_line = ''
        # Now look at the line we just finished; if it has a line ending, we
        # need to add eol to the end of the line.
        if lines[lineno].endswith(CRLF) or lines[lineno][-1] in CRLF:
            encoded_body += encoded_line + eol
        else:
            encoded_body += encoded_line
        encoded_line = ''
    return encoded_body


# For convenience and backwards compatibility w/ standard base64 module
body_encode = encode
encodestring = encode



# BAW: I'm not sure if the intent was for the signature of this function to be
# the same as base64MIME.decode() or not...
def decode(encoded, eol=NL):
    """Decode a quoted-printable string.

    Lines are separated with eol, which defaults to \\n.
    """
    if not encoded:
        return encoded
    # BAW: see comment in encode() above.  Again, we're building up the
    # decoded string with string concatenation, which could be done much more
    # efficiently.
    decoded = ''

    for line in encoded.splitlines():
        line = line.rstrip()
        if not line:
            decoded += eol
            continue

        i = 0
        n = len(line)
        while i < n:
            c = line[i]
            if c != '=':
                decoded += c
                i += 1
            # Otherwise, c == "=".  Are we at the end of the line?  If so, add
            # a soft line break.
            elif i+1 == n:
                i += 1
                continue
            # Decode if in form =AB
            elif i+2 < n and line[i+1] in hexdigits and line[i+2] in hexdigits:
                decoded += unquote(line[i:i+3])
                i += 3
            # Otherwise, not in form =AB, pass literally
            else:
                decoded += c
                i += 1

            if i == n:
                decoded += eol
    # Special case if original string did not end with eol
    if not encoded.endswith(eol) and decoded.endswith(eol):
        decoded = decoded[:-1]
    return decoded


# For convenience and backwards compatibility w/ standard base64 module
body_decode = decode
decodestring = decode



def _unquote_match(match):
    """Turn a match in the form =AB to the ASCII character with value 0xab"""
    s = match.group(0)
    return unquote(s)


# Header decoding is done a bit differently
def header_decode(s):
    """Decode a string encoded with RFC 2045 MIME header `Q' encoding.

    This function does not parse a full MIME header value encoded with
    quoted-printable (like =?iso-8859-1?q?Hello_World?=) -- please use
    the high level email.header class for that functionality.
    """
    s = s.replace('_', ' ')
    return re.sub(r'=[a-fA-F0-9]{2}', _unquote_match, s)
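
# Body and header decoding sketch for the functions above (Python 2):
from email.quoprimime import decode, header_decode
assert decode('Caf=E9 au=20lait') == 'Caf\xe9 au lait'
assert header_decode('Caf=E9_au_lait') == 'Caf\xe9 au lait'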
Binary file not shown. (removed image, 954 B)
Binary file not shown.
@@ -1,19 +0,0 @@
Return-Path: <bbb@zzz.org>
Delivered-To: bbb@zzz.org
Received: by mail.zzz.org (Postfix, from userid 889)
	id 27CEAD38CC; Fri, 4 May 2001 14:05:44 -0400 (EDT)
MIME-Version: 1.0
Content-Type: text/plain; charset=us-ascii
Content-Transfer-Encoding: 7bit
Message-ID: <15090.61304.110929.45684@aaa.zzz.org>
From: bbb@ddd.com (John X. Doe)
To: bbb@zzz.org
Subject: This is a test message
Date: Fri, 4 May 2001 14:05:44 -0400


Hi,

Do you like this message?

-Me
@@ -1,135 +0,0 @@
MIME-version: 1.0
From: ppp-request@zzz.org
Sender: ppp-admin@zzz.org
To: ppp@zzz.org
Subject: Ppp digest, Vol 1 #2 - 5 msgs
Date: Fri, 20 Apr 2001 20:18:00 -0400 (EDT)
X-Mailer: Mailman v2.0.4
X-Mailman-Version: 2.0.4
Content-Type: multipart/mixed; boundary="192.168.1.2.889.32614.987812255.500.21814"

--192.168.1.2.889.32614.987812255.500.21814
Content-type: text/plain; charset=us-ascii
Content-description: Masthead (Ppp digest, Vol 1 #2)

Send Ppp mailing list submissions to
	ppp@zzz.org

To subscribe or unsubscribe via the World Wide Web, visit
	http://www.zzz.org/mailman/listinfo/ppp
or, via email, send a message with subject or body 'help' to
	ppp-request@zzz.org

You can reach the person managing the list at
	ppp-admin@zzz.org

When replying, please edit your Subject line so it is more specific
than "Re: Contents of Ppp digest..."


--192.168.1.2.889.32614.987812255.500.21814
Content-type: text/plain; charset=us-ascii
Content-description: Today's Topics (5 msgs)

Today's Topics:

  1. testing #1 (Barry A. Warsaw)
  2. testing #2 (Barry A. Warsaw)
  3. testing #3 (Barry A. Warsaw)
  4. testing #4 (Barry A. Warsaw)
  5. testing #5 (Barry A. Warsaw)

--192.168.1.2.889.32614.987812255.500.21814
Content-Type: multipart/digest; boundary="__--__--"

--__--__--

Message: 1
Content-Type: text/plain; charset=us-ascii
Content-Transfer-Encoding: 7bit
Date: Fri, 20 Apr 2001 20:16:13 -0400
To: ppp@zzz.org
From: barry@digicool.com (Barry A. Warsaw)
Subject: [Ppp] testing #1
Precedence: bulk


hello


--__--__--

Message: 2
Date: Fri, 20 Apr 2001 20:16:21 -0400
Content-Type: text/plain; charset=us-ascii
Content-Transfer-Encoding: 7bit
To: ppp@zzz.org
From: barry@digicool.com (Barry A. Warsaw)
Precedence: bulk


hello


--__--__--

Message: 3
Date: Fri, 20 Apr 2001 20:16:25 -0400
Content-Type: text/plain; charset=us-ascii
Content-Transfer-Encoding: 7bit
To: ppp@zzz.org
From: barry@digicool.com (Barry A. Warsaw)
Subject: [Ppp] testing #3
Precedence: bulk


hello


--__--__--

Message: 4
Date: Fri, 20 Apr 2001 20:16:28 -0400
Content-Type: text/plain; charset=us-ascii
Content-Transfer-Encoding: 7bit
To: ppp@zzz.org
From: barry@digicool.com (Barry A. Warsaw)
Subject: [Ppp] testing #4
Precedence: bulk


hello


--__--__--

Message: 5
Date: Fri, 20 Apr 2001 20:16:32 -0400
Content-Type: text/plain; charset=us-ascii
Content-Transfer-Encoding: 7bit
To: ppp@zzz.org
From: barry@digicool.com (Barry A. Warsaw)
Subject: [Ppp] testing #5
Precedence: bulk


hello




--__--__----
--192.168.1.2.889.32614.987812255.500.21814
Content-type: text/plain; charset=us-ascii
Content-description: Digest Footer

_______________________________________________
Ppp mailing list
Ppp@zzz.org
http://www.zzz.org/mailman/listinfo/ppp


--192.168.1.2.889.32614.987812255.500.21814--

End of Ppp Digest
@@ -1,16 +0,0 @@
Return-Path: <bbb@zzz.org>
Delivered-To: bbb@zzz.org
Received: by mail.zzz.org (Postfix, from userid 889)
	id 27CEAD38CC; Fri, 4 May 2001 14:05:44 -0400 (EDT)
Message-ID: <15090.61304.110929.45684@aaa.zzz.org>
From: bbb@ddd.com (John X. Doe)
To: bbb@zzz.org
Subject: This is a test message
Date: Fri, 4 May 2001 14:05:44 -0400


Hi,

Do you like this message?

-Me
@@ -1,37 +0,0 @@
Return-Path: <barry@python.org>
Delivered-To: barry@python.org
Received: by mail.python.org (Postfix, from userid 889)
	id C2BF0D37C6; Tue, 11 Sep 2001 00:05:05 -0400 (EDT)
MIME-Version: 1.0
Content-Type: multipart/mixed; boundary="h90VIIIKmx"
Content-Transfer-Encoding: 7bit
Message-ID: <15261.36209.358846.118674@anthem.python.org>
From: barry@python.org (Barry A. Warsaw)
To: barry@python.org
Subject: a simple multipart
Date: Tue, 11 Sep 2001 00:05:05 -0400
X-Mailer: VM 6.95 under 21.4 (patch 4) "Artificial Intelligence" XEmacs Lucid
X-Attribution: BAW
X-Oblique-Strategy: Make a door into a window


--h90VIIIKmx
Content-Type: text/plain
Content-Disposition: inline;
	filename="msg.txt"
Content-Transfer-Encoding: 7bit

a simple kind of mirror
to reflect upon our own

--h90VIIIKmx
Content-Type: text/plain
Content-Disposition: inline;
	filename="msg.txt"
Content-Transfer-Encoding: 7bit

a simple kind of mirror
to reflect upon our own

--h90VIIIKmx--

@ -1,28 +0,0 @@
From: foo
Subject: bar
To: baz
MIME-Version: 1.0
Content-Type: multipart/report; report-type=delivery-status;
	boundary="D1690A7AC1.996856090/mail.example.com"
Message-Id: <20010803162810.0CA8AA7ACC@mail.example.com>

This is a MIME-encapsulated message.

--D1690A7AC1.996856090/mail.example.com
Content-Type: text/plain

Yadda yadda yadda

--D1690A7AC1.996856090/mail.example.com

Yadda yadda yadda

--D1690A7AC1.996856090/mail.example.com
Content-Type: message/rfc822

From: nobody@python.org

Yadda yadda yadda

--D1690A7AC1.996856090/mail.example.com--

@ -1,33 +0,0 @@
Return-Path: <barry@python.org>
Delivered-To: barry@python.org
MIME-Version: 1.0
Content-Type: message/rfc822
Content-Description: forwarded message
Content-Transfer-Encoding: 7bit
Message-ID: <15265.9482.641338.555352@python.org>
From: barry@zope.com (Barry A. Warsaw)
Sender: barry@python.org
To: barry@python.org
Subject: forwarded message from Barry A. Warsaw
Date: Thu, 13 Sep 2001 17:28:42 -0400
X-Mailer: VM 6.95 under 21.4 (patch 4) "Artificial Intelligence" XEmacs Lucid
X-Attribution: BAW
X-Oblique-Strategy: Be dirty
X-Url: http://barry.wooz.org

MIME-Version: 1.0
Content-Type: text/plain; charset=us-ascii
Return-Path: <barry@python.org>
Delivered-To: barry@python.org
Message-ID: <15265.9468.713530.98441@python.org>
From: barry@zope.com (Barry A. Warsaw)
Sender: barry@python.org
To: barry@python.org
Subject: testing
Date: Thu, 13 Sep 2001 17:28:28 -0400
X-Mailer: VM 6.95 under 21.4 (patch 4) "Artificial Intelligence" XEmacs Lucid
X-Attribution: BAW
X-Oblique-Strategy: Spectrum analysis
X-Url: http://barry.wooz.org


@ -1,83 +0,0 @@
MIME-Version: 1.0
From: Barry <barry@digicool.com>
To: Dingus Lovers <cravindogs@cravindogs.com>
Subject: Here is your dingus fish
Date: Fri, 20 Apr 2001 19:35:02 -0400
Content-Type: multipart/mixed; boundary="BOUNDARY"

--BOUNDARY
Content-Type: text/plain; charset="us-ascii"

Hi there,

This is the dingus fish.

--BOUNDARY
Content-Type: image/gif; name="dingusfish.gif"
Content-Transfer-Encoding: base64
content-disposition: attachment; filename="dingusfish.gif"

R0lGODdhAAEAAfAAAP///wAAACwAAAAAAAEAAQAC/oSPqcvtD6OctNqLs968+w+G4kiW5omm6sq2
7gvH8kzX9o3n+s73/g8MCofEovGITGICTKbyCV0FDNOo9SqpQqpOrJfXzTQj2vD3TGtqL+NtGQ2f
qTXmxzuOd7WXdcc9DyjU53ewFni4s0fGhdiYaEhGBelICTNoV1j5NUnFcrmUqemjNifJVWpaOqaI
oFq3SspZsSraE7sHq3jr1MZqWvi662vxV4tD+pvKW6aLDOCLyur8PDwbanyDeq0N3DctbQYeLDvR
RY6t95m6UB0d3mwIrV7e2VGNvjjffukeJp4w7F65KecGFsTHQGAygOrgrWs1jt28Rc88KESYcGLA
/obvTkH6p+CinWJiJmIMqXGQwH/y4qk0SYjgQTczT3ajKZGfuI0uJ4kkVI/DT5s3/ejkxI0aT4Y+
YTYgWbImUaXk9nlLmnSh1qJiJFl0OpUqRK4oOy7NyRQtHWofhoYVxkwWXKUSn0YsS+fUV6lhqfYb
6ayd3Z5qQdG1B7bvQzaJjwUV2lixMUZ7JVsOlfjWVr/3NB/uFvnySBN6Dcb6rGwaRM3wsormw5cC
M9NxWy/bWdufudCvy8bOAjXjVVwta/uO21sE5RHBCzNFXtgq9ORtH4eYjVP4Yryo026nvkFmCeyA
B29efV6ravCMK5JwWd5897Qrx7ll38o6iHDZ/rXPR//feevhF4l7wjUGX3xq1eeRfM4RSJGBIV1D
z1gKPkfWag3mVBVvva1RlX5bAJTPR/2YqNtw/FkIYYEi/pIZiAdpcxpoHtmnYYoZtvhUftzdx5ZX
JSKDW405zkGcZzzGZ6KEv4FI224oDmijlEf+xp6MJK5ojY/ASeVUR+wsKRuJ+XFZ5o7ZeEime8t1
ouUsU6YjF5ZtUihhkGfCdFQLWQFJ3UXxmElfhQnR+eCdcDbkFZp6vTRmj56ApCihn5QGpaToNZmR
n3NVSpZcQpZ2KEONusaiCsKAug0wkQbJSFO+PTSjneGxOuFjPlUk3ovWvdIerjUg9ZGIOtGq/qeX
eCYrrCX+1UPsgTKGGRSbzd5q156d/gpfbJxe66eD5iQKrXj7RGgruGxs62qebBHUKS32CKluCiqZ
qh+pmehmEb71noAUoe5e9Zm17S7773V10pjrtG4CmuurCV/n6zLK5durWNhqOvFXbjhZrMD0YhKe
wR0zOyuvsh6MWrGoIuzvyWu5y1WIFAqmJselypxXh6dKLNOKEB98L88bS2rkNqqlKzCNJp9c0G0j
Gzh0iRrCbHSXmPR643QS+4rWhgFmnSbSuXCjS0xAOWkU2UdLqyuUNfHSFdUouy3bm5i5GnDM3tG8
doJ4r5tqu3pPbRSVfvs8uJzeNXhp3n4j/tZ42SwH7eaWUUOjc3qFV9453UHTXZfcLH+OeNs5g36x
lBnHvTm7EbMbLeuaLncao8vWCXimfo1o+843Ak6y4ChNeGntvAYvfLK4ezmoyNIbNCLTCXO9ZV3A
E8/s88RczPzDwI4Ob7XZyl7+9Miban29h+tJZPrE21wgvBphDfrrfPdCTPKJD/y98L1rZwHcV6Jq
Zab0metpuNIX/qAFPoz171WUaUb4HAhBSzHuHfjzHb3kha/2Cctis/ORArVHNYfFyYRH2pYIRzic
isVOfPWD1b6mRTqpCRBozzof6UZVvFXRxWIr3GGrEviGYgyPMfahheiSaLs/9QeFu7oZ/ndSY8DD
ya9x+uPed+7mxN2IzIISBOMLFYWVqC3Pew1T2nFuuCiwZS5/v6II10i4t1OJcUH2U9zxKodHsGGv
Oa+zkvNUYUOa/TCCRutF9MzDwdlUMJADTCGSbDQ5OV4PTamDoPEi6Ecc/RF5RWwkcdSXvSOaDWSn
I9LlvubFTQpuc6JKXLcKeb+xdbKRBnwREemXyjg6ME65aJiOuBgrktzykfPLJBKR9ClMavJ62/Ff
BlNIyod9yX9wcSXexnXFpvkrbXk64xsx5Db7wXKP5fSgsvwIMM/9631VLBfkmtbHRXpqmtei52hG
pUwSlo+BASQoeILDOBgREECxBBh5/iYmNsQ9dIv5+OI++QkqdsJPc3uykz5fkM+OraeekcQF7X4n
B5S67za5U967PmooGQhUXfF7afXyCD7ONdRe17QogYjVx38uLwtrS6nhTnm15LQUnu9E2uK6CNI/
1HOABj0ESwOjut4FEpFQpdNAm4K2LHnDWHNcmKB2ioKBogysVZtMO2nSxUdZ8Yk2kJc7URioLVI0
YgmtIwZj4LoeKemgnOnbUdGnzZ4Oa6scqiolBGqS6RgWNLu0RMhcaE6rhhU4hiuqFXPAG8fGwTPW
FKeLMtdVmXLSs5YJGF/YeVm7rREMlY3UYE+yCxbaMXX8y15m5zVHq6GOKDMynzII/jdUHdyVqIy0
ifX2+r/EgtZcvRzSb72gU9ui87M2VecjKildW/aFqaYhKoryUjfB/g4qtyVuc60xFDGmCxwjW+qu
zjuwl2GkOWn66+3QiiEctvd04OVvcCVzjgT7lrkvjVGKKHmmlDUKowSeikb5kK/mJReuWOxONx+s
ULsl+Lqb0CVn0SrVyJ6wt4t6yTeSCafhPhAf0OXn6L60UMxiLolFAtmN35S2Ob1lZpQ1r/n0Qb5D
oQ1zJiRVDgF8N3Q8TYfbi3DyWCy3lT1nxyBs6FT3S2GOzWRlxwKvlRP0RPJA9SjxEy0UoEnkA+M4
cnzLMJrBGWLFEaaUb5lvpqbq/loOaU5+DFuHPxo82/OZuM8FXG3oVNZhtWpMpb/0Xu5m/LfLhHZQ
7yuVI0MqZ7NE43imC8jH3IwGZlbPm0xkJYs7+2U48hXTsFSMqgGDvai0kLxyynKNT/waj+q1c1tz
GjOpPBgdCSq3UKZxCSsqFIY+O6JbAWGWcV1pwqLyj5sGqCF1xb1F3varUWqrJv6cN3PrUXzijtfZ
FshpBL3Xwr4GIPvU2N8EjrJgS1zl21rbXQMXeXc5jjFyrhpCzijSv/RQtyPSzHCFMhlME95fHglt
pRsX+dfSQjUeHAlpWzJ5iOo79Ldnaxai6bXTcGO3fp07ri7HLEmXXPlYi8bv/qVxvNcdra6m7Rlb
6JBTb5fd66VhFRjGArh2n7R1rDW4P5NOT9K0I183T2scYkeZ3q/VFyLb09U9ajzXBS8Kgkhc4mBS
kYY9cy3Vy9lUnuNJH8HGIclUilwnBtjUOH0gteGOZ4c/XNrhXLSYDyxfnD8z1pDy7rYRvDolhnbe
UMzxCZUs40s6s7UIvBnLgc0+vKuOkIXeOrDymlp+Zxra4MZLBbVrqD/jTJ597pDmnw5c4+DbyB88
9Cg9DodYcSuMZT/114pptqc/EuTjRPvH/z5slzI3tluOEBBLqOXLOX+0I5929tO97wkvl/atCz+y
xJrdwteW2FNW/NSmBP+f/maYtVs/bYyBC7Ox3jsYZHL05CIrBa/nS+b3bHfiYm4Ueil1YZZSgAUI
fFZ1dxUmeA2oQRQ3RuGXNGLFV9/XbGFGPV6kfzk1TBBCd+izc7q1H+OHMJwmaBX2IQNYVAKHYepV
SSGCe6CnbYHHETKGNe43EDvFgZr0gB/nVHPHZ80VV1ojOiI3XDvYIkl4ayo4bxQIgrFXWTvBI0nH
VElWMuw2aLUWCRHHf8ymVCHjFlJnOSojfevCYyyyZDH0IcvHhrsnQ5O1OsWzONuVVKIxSxiFZ/tR
fKDAf6xFTnw4O9Qig2VCfW2hJQrmMOuHW0W3dLQmCMO2ccdUd/xyfflH/olTiHZVdGwb8nIwRzSE
J15jFlOJuBZBZ4CiyHyd2IFylFlB+HgHhYabhWOGwYO1ZH/Og1dtQlFMk352CGRSIFTapnWQEUtN
l4zv8S0aaCFDyGCBqDUxZYpxGHX01y/JuH1xhn7TOCnNCI4eKDs5WGX4R425F4vF1o3BJ4vO0otq
I3rimI7jJY1jISqnBxknCIvruF83mF5wN4X7qGLIhR8A2Vg0yFERSIXn9Vv3GHy3Vj/WIkKddlYi
yIMv2I/VMjTLpW7pt05SWIZR0RPyxpB4SIUM9lBPGBl0GC7oSEEwRYLe4pJpZY2P0zbI1n+Oc44w
qY3PUnmF0ixjVpDD/mJ9wpOBGTVgXlaCaZiPcIWK5NiKBIiPdGaQ0TWGvAiG7nMchdZb7Vgf8zNi
MuMyzRdy/lePe9iC4TRx7WhhOQI/QiSVNAmAa2lT/piFbuh7ofJoYSZzrSZ1bvmWw3eN2nKUPVky
uPN5/VRfohRd0VYZoqhKIlU6TXYhJxmPUIloAwc1bPmHEpaZYZORHNlXUJM07hATwHR8MJYqkwWR
WaIezFhxSFlc8/Fq82hEnpeRozg3ULhhr9lAGtVEkCg5ZNRuuVleBPaZadhG0ZgkyPmDOTOKzViM
YgOcpukKqQcbjAWS0IleQ2ROjdh6A+md1qWdBRSX7iSYgFRTtRmBpJioieXJiHfJiMGIR9fJOn8I
MSfXYhspn4ooSa2mSAj4n+8Bmg03fBJZoPOJgsVZRxu1oOMRPXYYjdqjihFaEoZpXBREanuJoRI6
cibFinq4ngUKh/wQd/H5ofYCZ0HJXR62opZFaAT0iFIZo4DIiUojkjeqKiuoZirKo5Y1a7AWckGa
BkuYoD5lpDK6eUs6CkDqpETwl1EqpfhJpVeKpVl6EgUAADs=

--BOUNDARY--
@ -1,24 +0,0 @@
MIME-Version: 1.0
From: Barry Warsaw <barry@zope.com>
To: Dingus Lovers <cravindogs@cravindogs.com>
Subject: Lyrics
Date: Fri, 20 Apr 2001 19:35:02 -0400
Content-Type: multipart/mixed; boundary="BOUNDARY"

--BOUNDARY
Content-Type: text/plain; charset="us-ascii"


--BOUNDARY
Content-Type: text/html; charset="iso-8859-1"


--BOUNDARY
Content-Type: text/plain; charset="iso-8859-2"


--BOUNDARY
Content-Type: text/plain; charset="koi8-r"


--BOUNDARY--
@ -1,24 +0,0 @@
MIME-Version: 1.0
From: Barry Warsaw <barry@zope.com>
To: Dingus Lovers <cravindogs@cravindogs.com>
Subject: Lyrics
Date: Fri, 20 Apr 2001 19:35:02 -0400
Content-Type: multipart/mixed; boundary="BOUNDARY"

--BOUNDARY
Content-Type: text/plain; charset="us-ascii"


--BOUNDARY
Content-Type: text/html; charset="iso-8859-1"


--BOUNDARY
Content-Type: text/plain


--BOUNDARY
Content-Type: text/plain; charset="koi8-r"


--BOUNDARY--
@ -1,39 +0,0 @@
MIME-Version: 1.0
From: Barry Warsaw <barry@zope.com>
To: Dingus Lovers <cravindogs@cravindogs.com>
Subject: Lyrics
Date: Fri, 20 Apr 2001 19:35:02 -0400
Content-Type: multipart/mixed; boundary="BOUNDARY"

--BOUNDARY
Content-Type: text/plain; charset="us-ascii"
Content-Transfer-Encoding: 7bit

This is a 7bit encoded message.

--BOUNDARY
Content-Type: text/html; charset="iso-8859-1"
Content-Transfer-Encoding: Quoted-Printable

=A1This is a Quoted Printable encoded message!

--BOUNDARY
Content-Type: text/plain; charset="iso-8859-1"
Content-Transfer-Encoding: Base64

VGhpcyBpcyBhIEJhc2U2NCBlbmNvZGVkIG1lc3NhZ2Uu


--BOUNDARY
Content-Type: text/plain; charset="iso-8859-1"
Content-Transfer-Encoding: Base64

VGhpcyBpcyBhIEJhc2U2NCBlbmNvZGVkIG1lc3NhZ2UuCg==


--BOUNDARY
Content-Type: text/plain; charset="iso-8859-1"

This has no Content-Transfer-Encoding: header.

--BOUNDARY--
@ -1,7 +0,0 @@
Content-Type: message/rfc822
MIME-Version: 1.0
Subject: The enclosing message

Subject: An enclosed message

Here is the body of the message.